Commit 1eeb4b3c51
@@ -1,24 +0,0 @@
|
||||
# EditorConfig configuration for nixpkgs
|
||||
# http://EditorConfig.org
|
||||
|
||||
# Top-most EditorConfig file
|
||||
root = true
|
||||
|
||||
# Unix-style newlines with a newline ending every file, utf-8 charset
|
||||
[*]
|
||||
end_of_line = lf
|
||||
insert_final_newline = true
|
||||
trim_trailing_whitespace = true
|
||||
charset = utf-8
|
||||
|
||||
# see https://nixos.org/nixpkgs/manual/#chap-conventions
|
||||
|
||||
# Match nix/ruby files, set indent to spaces with width of two
|
||||
[*.{nix,rb}]
|
||||
indent_style = space
|
||||
indent_size = 2
|
||||
|
||||
# Match shell/python/perl scripts, set indent to spaces with width of four
|
||||
[*.{sh,py,pl}]
|
||||
indent_style = space
|
||||
indent_size = 4
|
||||
.github/CONTRIBUTING.md
@@ -1,35 +0,0 @@
|
||||
# How to contribute
|
||||
|
||||
Note: contributing implies licensing those contributions
|
||||
under the terms of [COPYING](../COPYING), which is an MIT-like license.
|
||||
|
||||
## Opening issues
|
||||
|
||||
* Make sure you have a [GitHub account](https://github.com/signup/free)
|
||||
* [Submit an issue](https://github.com/NixOS/nixpkgs/issues) - assuming one does not already exist.
|
||||
* Clearly describe the issue including steps to reproduce when it is a bug.
|
||||
* Include information about which versions of nixpkgs and Nix you are using (nixos-version or git revision).
|
||||
|
||||
## Submitting changes
|
||||
|
||||
* Format the commits in the following way:
|
||||
|
||||
`(pkg-name | service-name): (from -> to | init at version | refactor | etc)`
|
||||
|
||||
Examples:
|
||||
|
||||
* nginx: init at 2.0.1
|
||||
* firefox: 3.0 -> 3.1.1
|
||||
* hydra service: add bazBaz option
|
||||
* nginx service: refactor config generation
|
||||
|
||||
* `meta.description` should (see the example after this list):
|
||||
* Be capitalized
|
||||
* Not start with the package name
|
||||
* Not have a dot at the end
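For example, a description following these rules might look like this in a package expression (the package and wording here are illustrative only):

```nix
meta = {
  description = "Fast and flexible static site generator";
};
```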
See the nixpkgs manual for more details on how to [Submit changes to nixpkgs](https://nixos.org/nixpkgs/manual/#chap-submitting-changes).
|
||||
|
||||
## Reviewing contributions
|
||||
|
||||
See the nixpkgs manual for more details on how to [Review contributions](https://nixos.org/nixpkgs/manual/#sec-reviewing-contributions).
|
||||
.github/ISSUE_TEMPLATE.md
@@ -1,13 +0,0 @@
|
||||
## Issue description
|
||||
|
||||
|
||||
|
||||
### Steps to reproduce
|
||||
|
||||
|
||||
|
||||
## Technical details
|
||||
|
||||
* System: (NixOS: `nixos-version`, Ubuntu/Fedora: `lsb_release -a`, ...)
|
||||
* Nix version: (run `nix-env --version`)
|
||||
* Nixpkgs version: (run `nix-instantiate --eval '<nixpkgs>' -A lib.nixpkgsVersion`)
|
||||
.github/PULL_REQUEST_TEMPLATE.md
@@ -1,19 +0,0 @@
|
||||
###### Motivation for this change
|
||||
|
||||
|
||||
###### Things done
|
||||
|
||||
- [ ] Tested using sandboxing
|
||||
([nix.useSandbox](http://nixos.org/nixos/manual/options.html#opt-nix.useSandbox) on NixOS,
|
||||
or option `build-use-sandbox` in [`nix.conf`](http://nixos.org/nix/manual/#sec-conf-file)
|
||||
on non-NixOS)
|
||||
- Built on platform(s)
|
||||
- [ ] NixOS
|
||||
- [ ] macOS
|
||||
- [ ] Linux
|
||||
- [ ] Tested compilation of all pkgs that depend on this change using `nix-shell -p nox --run "nox-review wip"`
|
||||
- [ ] Tested execution of all binary files (usually in `./result/bin/`)
|
||||
- [ ] Fits [CONTRIBUTING.md](https://github.com/NixOS/nixpkgs/blob/master/.github/CONTRIBUTING.md).
|
||||
|
||||
---
|
||||
|
||||
.gitignore
@@ -10,7 +10,4 @@ result-*
|
||||
/doc/manual.pdf
|
||||
.version-suffix
|
||||
|
||||
.DS_Store
|
||||
|
||||
/pkgs/development/libraries/qt-5/*/tmp/
|
||||
/pkgs/desktops/kde-5/*/tmp/
|
||||
.DS_Store
|
||||
.mention-bot
@@ -1,13 +0,0 @@
|
||||
{
|
||||
"userBlacklist": [
|
||||
"civodul",
|
||||
"jhasse",
|
||||
"shlevy"
|
||||
],
|
||||
"alwaysNotifyForPaths": [
|
||||
{ "name": "FRidh", "files": ["pkgs/top-level/python-packages.nix", "pkgs/development/interpreters/python/*", "pkgs/development/python-modules/*" ] },
|
||||
{ "name": "LnL7", "files": ["pkgs/stdenv/darwin/*", "pkgs/os-specific/darwin/*"] },
|
||||
{ "name": "copumpkin", "files": ["pkgs/stdenv/darwin/*", "pkgs/os-specific/darwin/apple-source-releases/*"] }
|
||||
],
|
||||
"fileBlacklist": ["pkgs/top-level/all-packages.nix"]
|
||||
}
|
||||
.travis.yml
@@ -1,20 +1,5 @@
|
||||
language: nix
|
||||
matrix:
|
||||
include:
|
||||
- os: linux
|
||||
sudo: false
|
||||
script:
|
||||
- ./maintainers/scripts/travis-nox-review-pr.sh nixpkgs-verify nixpkgs-manual nixpkgs-tarball nixpkgs-unstable
|
||||
- ./maintainers/scripts/travis-nox-review-pr.sh nixos-options nixos-manual
|
||||
- os: linux
|
||||
sudo: required
|
||||
dist: trusty
|
||||
before_script:
|
||||
- sudo mount -o remount,exec,size=2G,mode=755 /run/user
|
||||
script: ./maintainers/scripts/travis-nox-review-pr.sh nox pr
|
||||
- os: osx
|
||||
osx_image: xcode7.3
|
||||
script: ./maintainers/scripts/travis-nox-review-pr.sh nox pr
|
||||
env:
|
||||
global:
|
||||
- GITHUB_TOKEN=5edaaf1017f691ed34e7f80878f8f5fbd071603f
|
||||
language: python
|
||||
python: "3.4"
|
||||
before_install: ./maintainers/scripts/travis-nox-review-pr.sh nix
|
||||
install: ./maintainers/scripts/travis-nox-review-pr.sh nox
|
||||
script: ./maintainers/scripts/travis-nox-review-pr.sh build
|
||||
|
||||
COPYING
@@ -1,4 +1,4 @@
|
||||
Copyright (c) 2003-2016 Eelco Dolstra and the Nixpkgs/NixOS contributors
|
||||
Copyright (c) 2003-2006 Eelco Dolstra
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining
|
||||
a copy of this software and associated documentation files (the
|
||||
|
||||
README.md
@@ -1,42 +1,10 @@
|
||||
[<img src="http://nixos.org/logo/nixos-hires.png" width="500px" alt="logo" />](https://nixos.org/nixos)
|
||||
|
||||
[](https://travis-ci.org/NixOS/nixpkgs)
|
||||
[](https://www.codetriage.com/nixos/nixpkgs)
|
||||
|
||||
Nixpkgs is a collection of packages for the [Nix](https://nixos.org/nix/) package
|
||||
manager. It is periodically built and tested by the [hydra](http://hydra.nixos.org/)
|
||||
build daemon as so-called channels. To get channel information via git, add
|
||||
[nixpkgs-channels](https://github.com/NixOS/nixpkgs-channels.git) as a remote:
|
||||
|
||||
```
|
||||
% git remote add channels git://github.com/NixOS/nixpkgs-channels.git
|
||||
```
|
||||
|
||||
For stability and maximum binary package support, it is recommended to maintain
|
||||
custom changes on top of one of the channels, e.g. `nixos-16.09` for the latest
|
||||
release and `nixos-unstable` for the latest successful build of master:
|
||||
|
||||
```
|
||||
% git remote update channels
|
||||
% git rebase channels/nixos-16.09
|
||||
```
|
||||
|
||||
For pull-requests, please rebase onto nixpkgs `master`.
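A typical way to do that (a sketch; `upstream` is an assumed name for a remote pointing at `NixOS/nixpkgs`):

```
% git remote add upstream git://github.com/NixOS/nixpkgs.git
% git fetch upstream
% git rebase upstream/master
```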
[NixOS](https://nixos.org/nixos/) linux distribution source code is located inside
|
||||
`nixos/` folder.
|
||||
Nixpkgs is a collection of packages for the [Nix](https://nixos.org/nix/) package
manager. Nixpkgs also includes the [NixOS](https://nixos.org/nixos/) Linux distribution source code.
|
||||
|
||||
* [NixOS installation instructions](https://nixos.org/nixos/manual/#ch-installation)
|
||||
* [Documentation (Nix Expression Language chapter)](https://nixos.org/nix/manual/#ch-expression-language)
|
||||
* [Manual (How to write packages for Nix)](https://nixos.org/nixpkgs/manual/)
|
||||
* [Manual (NixOS)](https://nixos.org/nixos/manual/)
|
||||
* [Nix Wiki](https://nixos.org/wiki/) (deprecated, see milestone ["Move the Wiki!"](https://github.com/NixOS/nixpkgs/issues?q=is%3Aopen+is%3Aissue+milestone%3A%22Move+the+wiki%21%22))
|
||||
* [Continuous package builds for unstable/master](https://hydra.nixos.org/jobset/nixos/trunk-combined)
|
||||
* [Continuous package builds for 16.09 release](https://hydra.nixos.org/jobset/nixos/release-16.09)
|
||||
* [Tests for unstable/master](https://hydra.nixos.org/job/nixos/trunk-combined/tested#tabs-constituents)
|
||||
* [Tests for 16.09 release](https://hydra.nixos.org/job/nixos/release-16.09/tested#tabs-constituents)
|
||||
|
||||
Communication:
|
||||
|
||||
* [Mailing list](http://lists.science.uu.nl/mailman/listinfo/nix-dev)
|
||||
* [Continuous build](https://hydra.nixos.org/jobset/nixos/trunk-combined)
|
||||
* [Tests](https://hydra.nixos.org/job/nixos/trunk-combined/tested#tabs-constituents)
|
||||
* [Mailing list](https://lists.science.uu.nl/mailman/listinfo/nix-dev)
|
||||
* [IRC - #nixos on freenode.net](irc://irc.freenode.net/#nixos)
|
||||
|
||||
@@ -1,9 +1,7 @@
|
||||
let requiredVersion = import ./lib/minver.nix; in
|
||||
if ! builtins ? nixVersion || builtins.compareVersions "1.7" builtins.nixVersion == 1 then
|
||||
|
||||
if ! builtins ? nixVersion || builtins.compareVersions requiredVersion builtins.nixVersion == 1 then
|
||||
|
||||
abort "This version of Nixpkgs requires Nix >= ${requiredVersion}, please upgrade! See https://nixos.org/wiki/How_to_update_when_Nix_is_too_old_to_evaluate_Nixpkgs"
|
||||
abort "This version of Nixpkgs requires Nix >= 1.7, please upgrade!"
|
||||
|
||||
else
|
||||
|
||||
import ./pkgs/top-level/impure.nix
|
||||
import ./pkgs/top-level/all-packages.nix
|
||||
|
||||
@@ -5,7 +5,7 @@
|
||||
<title>Coding conventions</title>
|
||||
|
||||
|
||||
<section xml:id="sec-syntax"><title>Syntax</title>
|
||||
<section><title>Syntax</title>
|
||||
|
||||
<itemizedlist>
|
||||
|
||||
@@ -169,8 +169,8 @@ stdenv.mkDerivation { ...
|
||||
args: with args; <replaceable>...</replaceable>
|
||||
</programlisting>
|
||||
|
||||
or
|
||||
|
||||
or
|
||||
|
||||
<programlisting>
|
||||
{ stdenv, fetchurl, perl, ... }: <replaceable>...</replaceable>
|
||||
</programlisting>
|
||||
@@ -207,7 +207,7 @@ args.stdenv.mkDerivation (args // {
|
||||
</section>
|
||||
|
||||
|
||||
<section xml:id="sec-package-naming"><title>Package naming</title>
|
||||
<section><title>Package naming</title>
|
||||
|
||||
<para>In Nixpkgs, there are generally three different names associated with a package:
|
||||
|
||||
@@ -251,13 +251,10 @@ bound to the variable name <varname>e2fsprogs</varname> in
|
||||
|
||||
<listitem><para>The version part of the <literal>name</literal>
|
||||
attribute <emphasis>must</emphasis> start with a digit (following a
|
||||
dash) — e.g., <literal>"hello-0.3.1rc2"</literal>.</para></listitem>
|
||||
|
||||
<listitem><para>If a package is not a release but a commit from a repository, then
|
||||
the version part of the name <emphasis>must</emphasis> be the date of that
|
||||
(fetched) commit. The date must be in <literal>"YYYY-MM-DD"</literal> format.
|
||||
Also append <literal>"unstable"</literal> to the name - e.g.,
|
||||
<literal>"pkgname-unstable-2014-09-23"</literal>.</para></listitem>
|
||||
dash) — e.g., <literal>"hello-0.3-pre-r3910"</literal> instead of
|
||||
<literal>"hello-svn-r3910"</literal>, as the latter would be seen as
|
||||
a package named <literal>hello-svn</literal> by
|
||||
<command>nix-env</command>.</para></listitem>
|
||||
|
||||
<listitem><para>Dashes in the package name should be preserved
|
||||
in new variable names, rather than converted to underscores
|
||||
@@ -289,7 +286,7 @@ dashes between words — not in camel case. For instance, it should be
|
||||
<filename>allPackages.nix</filename> or
|
||||
<filename>AllPackages.nix</filename>.</para>
|
||||
|
||||
<section xml:id="sec-hierarchy"><title>Hierarchy</title>
|
||||
<section><title>Hierarchy</title>
|
||||
|
||||
<para>Each package should be stored in its own directory somewhere in
|
||||
the <filename>pkgs/</filename> tree, i.e. in
|
||||
@@ -448,17 +445,12 @@ splitting up an existing category.</para>
|
||||
</listitem>
|
||||
</varlistentry>
|
||||
<varlistentry>
|
||||
<term>If it’s a <emphasis>desktop environment</emphasis>:</term>
|
||||
<term>If it’s a <emphasis>desktop environment</emphasis>
|
||||
(including <emphasis>window managers</emphasis>):</term>
|
||||
<listitem>
|
||||
<para><filename>desktops</filename> (e.g. <filename>kde</filename>, <filename>gnome</filename>, <filename>enlightenment</filename>)</para>
|
||||
</listitem>
|
||||
</varlistentry>
|
||||
<varlistentry>
|
||||
<term>If it’s a <emphasis>window manager</emphasis>:</term>
|
||||
<listitem>
|
||||
<para><filename>applications/window-managers</filename> (e.g. <filename>awesome</filename>, <filename>compiz</filename>, <filename>stumpwm</filename>)</para>
|
||||
</listitem>
|
||||
</varlistentry>
|
||||
<varlistentry>
|
||||
<term>If it’s an <emphasis>application</emphasis>:</term>
|
||||
<listitem>
|
||||
@@ -606,75 +598,6 @@ evaluate correctly.</para>
|
||||
</section>
|
||||
|
||||
</section>
|
||||
<section xml:id="sec-sources"><title>Fetching Sources</title>
|
||||
<para>There are multiple ways to fetch a package source in nixpkgs. The
|
||||
general guideline is that you should package sources with a high degree of
|
||||
availability. Right now there is only one fetcher which has mirroring
|
||||
support and that is <literal>fetchurl</literal>. Note that you should also
|
||||
prefer protocols which have a corresponding proxy environment variable.
|
||||
</para>
|
||||
<para>You can find many source fetch helpers in <literal>pkgs/build-support/fetch*</literal>.
|
||||
</para>
|
||||
<para>In the file <literal>pkgs/top-level/all-packages.nix</literal> you can
|
||||
find fetch helpers; these have names of the form
<literal>fetchFrom*</literal>. The intention of these is to provide
snapshot fetches using the same API as some of the version-controlled
fetchers from <literal>pkgs/build-support/</literal>. As an example, going
from bad to good:
|
||||
<itemizedlist>
|
||||
<listitem>
|
||||
<para>Uses <literal>git://</literal> which won't be proxied.
|
||||
<programlisting>
|
||||
src = fetchgit {
|
||||
url = "git://github.com/NixOS/nix.git";
|
||||
rev = "1f795f9f44607cc5bec70d1300150bfefcef2aae";
|
||||
sha256 = "1cw5fszffl5pkpa6s6wjnkiv6lm5k618s32sp60kvmvpy7a2v9kg";
|
||||
}
|
||||
</programlisting>
|
||||
</para>
|
||||
</listitem>
|
||||
<listitem>
|
||||
<para>This is ok, but an archive fetch will still be faster.
|
||||
<programlisting>
|
||||
src = fetchgit {
|
||||
url = "https://github.com/NixOS/nix.git";
|
||||
rev = "1f795f9f44607cc5bec70d1300150bfefcef2aae";
|
||||
sha256 = "1cw5fszffl5pkpa6s6wjnkiv6lm5k618s32sp60kvmvpy7a2v9kg";
|
||||
}
|
||||
</programlisting>
|
||||
</para>
|
||||
</listitem>
|
||||
<listitem>
|
||||
<para>Fetches a snapshot archive and you get the rev you want.
|
||||
<programlisting>
|
||||
src = fetchFromGitHub {
|
||||
owner = "NixOS";
|
||||
repo = "nix";
|
||||
rev = "1f795f9f44607cc5bec70d1300150bfefcef2aae";
|
||||
sha256 = "04yri911rj9j19qqqn6m82266fl05pz98inasni0vxr1cf1gdgv9";
|
||||
}
|
||||
</programlisting>
|
||||
</para>
|
||||
</listitem>
|
||||
</itemizedlist>
|
||||
</para>
|
||||
</section>
|
||||
|
||||
<section xml:id="sec-patches"><title>Patches</title>
|
||||
<para>Only patches that are unique to <literal>nixpkgs</literal> should be
|
||||
included in <literal>nixpkgs</literal> source.</para>
|
||||
<para>Patches available online should be retrieved using
|
||||
<literal>fetchpatch</literal>.</para>
|
||||
<para>
|
||||
<programlisting>
|
||||
patches = [
|
||||
(fetchpatch {
|
||||
name = "fix-check-for-using-shared-freetype-lib.patch";
|
||||
url = "http://git.ghostscript.com/?p=ghostpdl.git;a=patch;h=8f5d285";
|
||||
sha256 = "1f0k043rng7f0rfl9hhb89qzvvksqmkrikmm38p61yfx51l325xr";
|
||||
})
|
||||
];
|
||||
</programlisting>
|
||||
</para>
|
||||
</section>
|
||||
|
||||
</chapter>
|
||||
|
||||
@@ -1,109 +0,0 @@
|
||||
<chapter xmlns="http://docbook.org/ns/docbook"
|
||||
xmlns:xlink="http://www.w3.org/1999/xlink"
|
||||
xml:id="chap-packageconfig">
|
||||
|
||||
<title><filename>~/.nixpkgs/config.nix</filename>: global configuration</title>
|
||||
|
||||
<para>Nix packages can be configured to allow or deny certain options.</para>
|
||||
|
||||
<para>To apply the configuration, edit
|
||||
<filename>~/.nixpkgs/config.nix</filename> and set it like
|
||||
|
||||
<programlisting>
|
||||
{
|
||||
allowUnfree = true;
|
||||
}
|
||||
</programlisting>
|
||||
|
||||
which will allow the Nix package manager to install unfree licensed packages.</para>
|
||||
|
||||
<para>The configuration as listed also applies to NixOS, via the
<option>nixpkgs.config</option> option.</para>
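<para>For example, the equivalent system-wide setting in a NixOS
<filename>configuration.nix</filename> would be (a minimal sketch):

<programlisting>
{
  nixpkgs.config.allowUnfree = true;
}
</programlisting>
</para>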
<itemizedlist>
|
||||
|
||||
<listitem>
|
||||
<para>Allow installing of packages that are distributed under
|
||||
unfree license by setting <programlisting>allowUnfree =
|
||||
true;</programlisting> or deny them by setting it to
|
||||
<literal>false</literal>.</para>
|
||||
|
||||
<para>Same can be achieved by setting the environment variable:
|
||||
|
||||
<programlisting>
|
||||
$ export NIXPKGS_ALLOW_UNFREE=1
|
||||
</programlisting>
|
||||
|
||||
</para>
|
||||
</listitem>
|
||||
|
||||
<listitem>
|
||||
<para>Whenever unfree packages are not allowed, single packages
|
||||
can still be allowed by a predicate function that accepts a package
as an argument and returns a boolean:
|
||||
|
||||
<programlisting>
|
||||
allowUnfreePredicate = (pkg: ...);
|
||||
</programlisting>
|
||||
|
||||
Example to allow flash player and visual studio code only:
|
||||
|
||||
<programlisting>
|
||||
allowUnfreePredicate = with builtins; (pkg: elem (parseDrvName pkg.name).name [ "flashplayer" "vscode" ]);
|
||||
</programlisting>
|
||||
|
||||
</para>
|
||||
</listitem>
|
||||
|
||||
<listitem>
|
||||
<para>Whenever unfree packages are not allowed, packages can still
|
||||
be whitelisted by their license:
|
||||
|
||||
<programlisting>
|
||||
whitelistedLicenses = with stdenv.lib.licenses; [ amd wtfpl ];
|
||||
</programlisting>
|
||||
</para>
|
||||
</listitem>
|
||||
|
||||
<listitem>
|
||||
<para>In addition to whitelisting licenses which are denied by the
|
||||
<literal>allowUnfree</literal> setting, you can also explicitly
|
||||
deny installation of packages which have a certain license:
|
||||
|
||||
<programlisting>
|
||||
blacklistedLicenses = with stdenv.lib.licenses; [ agpl3 gpl3 ];
|
||||
</programlisting>
|
||||
</para>
|
||||
</listitem>
|
||||
|
||||
</itemizedlist>
|
||||
|
||||
<para>A complete list of licenses can be found in the file
|
||||
<filename>lib/licenses.nix</filename> of the nix package tree.</para>
|
||||
|
||||
|
||||
<!--============================================================-->
|
||||
|
||||
<section xml:id="sec-modify-via-packageOverrides"><title>Modify
|
||||
packages via <literal>packageOverrides</literal></title>
|
||||
|
||||
<para>You can define a function called
|
||||
<varname>packageOverrides</varname> in your local
|
||||
<filename>~/.nixpkgs/config.nix</filename> to override Nix packages. It
must be a function that takes pkgs as an argument and returns a modified
|
||||
set of packages.
|
||||
|
||||
<programlisting>
|
||||
{
|
||||
packageOverrides = pkgs: rec {
|
||||
foo = pkgs.foo.override { ... };
|
||||
};
|
||||
}
|
||||
</programlisting>
|
||||
|
||||
</para>
|
||||
|
||||
</section>
|
||||
|
||||
|
||||
</chapter>
|
||||
@@ -2,19 +2,18 @@
|
||||
xmlns:xlink="http://www.w3.org/1999/xlink"
|
||||
xml:id="chap-contributing">
|
||||
|
||||
<title>Contributing to this documentation</title>
|
||||
<title>Contributing</title>
|
||||
|
||||
<para>The DocBook sources of the Nixpkgs manual are in the <filename
|
||||
xlink:href="https://github.com/NixOS/nixpkgs/tree/master/doc">doc</filename>
|
||||
subdirectory of the Nixpkgs repository. If you make modifications to
|
||||
the manual, it's important to build it before committing. You can do that as follows:
|
||||
<para>If you make modifications to the manual, it's important to build the manual before contributing:</para>
|
||||
|
||||
<screen>
|
||||
$ cd /path/to/nixpkgs
|
||||
$ nix-build doc
|
||||
</screen>
|
||||
<orderedlist>
|
||||
|
||||
If the build succeeds, the manual will be in
|
||||
<filename>./result/share/doc/nixpkgs/manual.html</filename>.</para>
|
||||
<listitem><para><command>$ git clone git://github.com/NixOS/nixpkgs.git</command></para></listitem>
|
||||
|
||||
<listitem><para><command>$ nix-build -A manual nixpkgs/pkgs/top-level/release.nix</command></para></listitem>
|
||||
|
||||
<listitem><para>Inside the built derivation you will find the <literal>manual/index.html</literal> file.</para></listitem>
|
||||
|
||||
</orderedlist>
|
||||
|
||||
</chapter>
|
||||
|
||||
@@ -1,14 +1,12 @@
|
||||
let
|
||||
pkgs = import ./.. { };
|
||||
lib = pkgs.lib;
|
||||
sources = lib.sourceFilesBySuffices ./. [".xml"];
|
||||
sources-langs = ./languages-frameworks;
|
||||
in
|
||||
pkgs.stdenv.mkDerivation {
|
||||
with import ./.. { };
|
||||
with lib;
|
||||
|
||||
stdenv.mkDerivation {
|
||||
name = "nixpkgs-manual";
|
||||
|
||||
sources = sourceFilesBySuffices ./. [".xml"];
|
||||
|
||||
buildInputs = with pkgs; [ pandoc libxml2 libxslt zip ];
|
||||
buildInputs = [ libxml2 libxslt ];
|
||||
|
||||
xsltFlags = ''
|
||||
--param section.autolabel 1
|
||||
@@ -20,86 +18,25 @@ pkgs.stdenv.mkDerivation {
|
||||
--param callout.graphics.extension '.gif'
|
||||
'';
|
||||
|
||||
buildCommand = ''
|
||||
ln -s $sources/*.xml . # */
|
||||
|
||||
buildCommand = let toDocbook = { useChapters ? false, inputFile, outputFile }:
|
||||
let
|
||||
extraHeader = ''xmlns="http://docbook.org/ns/docbook" xmlns:xlink="http://www.w3.org/1999/xlink" '';
|
||||
in ''
|
||||
{
|
||||
pandoc '${inputFile}' -w docbook ${lib.optionalString useChapters "--chapters"} \
|
||||
--smart \
|
||||
| sed -e 's|<ulink url=|<link xlink:href=|' \
|
||||
-e 's|</ulink>|</link>|' \
|
||||
-e 's|<sect. id=|<section xml:id=|' \
|
||||
-e 's|</sect[0-9]>|</section>|' \
|
||||
-e '1s| id=| xml:id=|' \
|
||||
-e '1s|\(<[^ ]* \)|\1${extraHeader}|'
|
||||
} > '${outputFile}'
|
||||
'';
|
||||
in
|
||||
echo ${nixpkgsVersion} > .version
|
||||
|
||||
''
|
||||
ln -s '${sources}/'*.xml .
|
||||
mkdir ./languages-frameworks
|
||||
cp -s '${sources-langs}'/* ./languages-frameworks
|
||||
''
|
||||
+ toDocbook {
|
||||
inputFile = ./introduction.md;
|
||||
outputFile = "introduction.xml";
|
||||
useChapters = true;
|
||||
}
|
||||
+ toDocbook {
|
||||
inputFile = ./languages-frameworks/python.md;
|
||||
outputFile = "./languages-frameworks/python.xml";
|
||||
}
|
||||
+ toDocbook {
|
||||
inputFile = ./languages-frameworks/haskell.md;
|
||||
outputFile = "./languages-frameworks/haskell.xml";
|
||||
}
|
||||
+ toDocbook {
|
||||
inputFile = ../pkgs/development/idris-modules/README.md;
|
||||
outputFile = "languages-frameworks/idris.xml";
|
||||
}
|
||||
+ toDocbook {
|
||||
inputFile = ../pkgs/development/node-packages/README.md;
|
||||
outputFile = "languages-frameworks/node.xml";
|
||||
}
|
||||
+ toDocbook {
|
||||
inputFile = ../pkgs/development/r-modules/README.md;
|
||||
outputFile = "languages-frameworks/r.xml";
|
||||
}
|
||||
+ ''
|
||||
echo ${lib.nixpkgsVersion} > .version
|
||||
|
||||
# validate against relaxng schema
|
||||
xmllint --nonet --xinclude --noxincludenode manual.xml --output manual-full.xml
|
||||
${pkgs.jing}/bin/jing ${pkgs.docbook5}/xml/rng/docbook/docbook.rng manual-full.xml
|
||||
xmllint --noout --nonet --xinclude --noxincludenode \
|
||||
--relaxng ${docbook5}/xml/rng/docbook/docbook.rng \
|
||||
manual.xml
|
||||
|
||||
dst=$out/share/doc/nixpkgs
|
||||
mkdir -p $dst
|
||||
xsltproc $xsltFlags --nonet --xinclude \
|
||||
--output $dst/manual.html \
|
||||
${pkgs.docbook5_xsl}/xml/xsl/docbook/xhtml/docbook.xsl \
|
||||
${docbook5_xsl}/xml/xsl/docbook/xhtml/docbook.xsl \
|
||||
./manual.xml
|
||||
|
||||
cp ${./style.css} $dst/style.css
|
||||
|
||||
mkdir -p $dst/images/callouts
|
||||
cp "${pkgs.docbook5_xsl}/xml/xsl/docbook/images/callouts/"*.gif $dst/images/callouts/
|
||||
|
||||
mkdir -p $out/nix-support
|
||||
echo "doc manual $dst manual.html" >> $out/nix-support/hydra-build-products
|
||||
|
||||
xsltproc $xsltFlags --nonet --xinclude \
|
||||
--output $dst/epub/ \
|
||||
${pkgs.docbook5_xsl}/xml/xsl/docbook/epub/docbook.xsl \
|
||||
./manual.xml
|
||||
|
||||
cp -r $dst/images $dst/epub/OEBPS
|
||||
echo "application/epub+zip" > mimetype
|
||||
manual="$dst/nixpkgs-manual.epub"
|
||||
zip -0Xq "$manual" mimetype
|
||||
cd $dst/epub && zip -Xr9D "$manual" *
|
||||
rm -rf $dst/epub
|
||||
'';
|
||||
}
|
||||
|
||||
@@ -1,750 +0,0 @@
|
||||
<chapter xmlns="http://docbook.org/ns/docbook"
|
||||
xmlns:xlink="http://www.w3.org/1999/xlink"
|
||||
xml:id="chap-functions">
|
||||
|
||||
<title>Functions reference</title>
|
||||
|
||||
<para>
|
||||
The nixpkgs repository has several utility functions to manipulate Nix expressions.
|
||||
</para>
|
||||
|
||||
<section xml:id="sec-overrides">
|
||||
<title>Overriding</title>
|
||||
|
||||
<para>
|
||||
Sometimes one wants to override parts of
|
||||
<literal>nixpkgs</literal>, e.g. derivation attributes, the results of
|
||||
derivations or even the whole package set.
|
||||
</para>
|
||||
|
||||
<section xml:id="sec-pkgs-overridePackages">
|
||||
<title>pkgs.overridePackages</title>
|
||||
|
||||
<para>
|
||||
This function inside the nixpkgs expression (<varname>pkgs</varname>)
|
||||
can be used to override the set of packages itself.
|
||||
</para>
|
||||
<para>
|
||||
Warning: this function is expensive and must not be used from within
|
||||
the nixpkgs repository.
|
||||
</para>
|
||||
<para>
|
||||
Example usage:
|
||||
|
||||
<programlisting>let
|
||||
pkgs = import <nixpkgs> {};
|
||||
newpkgs = pkgs.overridePackages (self: super: {
|
||||
foo = super.foo.override { ... };
|
||||
});
|
||||
in ...</programlisting>
|
||||
</para>
|
||||
|
||||
<para>
|
||||
The resulting <varname>newpkgs</varname> will have the new <varname>foo</varname>
|
||||
expression, and all other expressions depending on <varname>foo</varname> will also
|
||||
use the new <varname>foo</varname> expression.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
The behavior of this function is similar to <link
|
||||
linkend="sec-modify-via-packageOverrides">config.packageOverrides</link>.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
The <varname>self</varname> parameter refers to the final package set with the
|
||||
applied overrides. Using this parameter may lead to infinite recursion if not
|
||||
used consciously.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
The <varname>super</varname> parameter refers to the old package set.
|
||||
It's equivalent to <varname>pkgs</varname> in the above example.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
Note that in previous versions of nixpkgs, this method replaced any changes from <link
|
||||
linkend="sec-modify-via-packageOverrides">config.packageOverrides</link>,
|
||||
along with those from previous calls if this function was called repeatedly.
|
||||
Now those previous changes will be preserved so this function can be "chained" meaningfully.
|
||||
To recover the old behavior, make sure <varname>config.packageOverrides</varname> is unset,
|
||||
and call this only once off a "freshly" imported nixpkgs:
|
||||
|
||||
<programlisting>let
|
||||
pkgs = import <nixpkgs> { config = {}; };
|
||||
newpkgs = pkgs.overridePackages ...;
|
||||
in ...</programlisting>
|
||||
</para>
|
||||
|
||||
</section>
|
||||
|
||||
<section xml:id="sec-pkg-override">
|
||||
<title><pkg>.override</title>
|
||||
|
||||
<para>
|
||||
The function <varname>override</varname> is usually available for all the
|
||||
derivations in the nixpkgs expression (<varname>pkgs</varname>).
|
||||
</para>
|
||||
<para>
|
||||
It is used to override the arguments passed to a function.
|
||||
</para>
|
||||
<para>
|
||||
Example usages:
|
||||
|
||||
<programlisting>pkgs.foo.override { arg1 = val1; arg2 = val2; ... }</programlisting>
|
||||
<programlisting>pkgs.overridePackages (self: super: {
|
||||
foo = super.foo.override { barSupport = true ; };
|
||||
})</programlisting>
|
||||
<programlisting>mypkg = pkgs.callPackage ./mypkg.nix {
|
||||
mydep = pkgs.mydep.override { ... };
|
||||
}</programlisting>
|
||||
</para>
|
||||
|
||||
<para>
|
||||
In the first example, <varname>pkgs.foo</varname> is the result of a function call
|
||||
with some default arguments, usually a derivation.
|
||||
Using <varname>pkgs.foo.override</varname> will call the same function with
|
||||
the given new arguments.
|
||||
</para>
|
||||
|
||||
</section>
|
||||
|
||||
<section xml:id="sec-pkg-overrideAttrs">
|
||||
<title><pkg>.overrideAttrs</title>
|
||||
|
||||
<para>
|
||||
The function <varname>overrideAttrs</varname> allows overriding the
|
||||
attribute set passed to a <varname>stdenv.mkDerivation</varname> call,
|
||||
producing a new derivation based on the original one.
|
||||
This function is available on all derivations produced by the
|
||||
<varname>stdenv.mkDerivation</varname> function, which is most packages
|
||||
in the nixpkgs expression <varname>pkgs</varname>.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
Example usage:
|
||||
|
||||
<programlisting>helloWithDebug = pkgs.hello.overrideAttrs (oldAttrs: rec {
|
||||
separateDebugInfo = true;
|
||||
});</programlisting>
|
||||
</para>
|
||||
|
||||
<para>
|
||||
In the above example, the <varname>separateDebugInfo</varname> attribute is
|
||||
overridden to be true, thus building debug info for
|
||||
<varname>helloWithDebug</varname>, while all other attributes will be
|
||||
retained from the original <varname>hello</varname> package.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
The argument <varname>oldAttrs</varname> is conventionally used to refer to
|
||||
the attr set originally passed to <varname>stdenv.mkDerivation</varname>.
|
||||
</para>
|
||||
|
||||
<note>
|
||||
<para>
|
||||
Note that <varname>separateDebugInfo</varname> is processed only by the
|
||||
<varname>stdenv.mkDerivation</varname> function, not the generated, raw
|
||||
Nix derivation. Thus, using <varname>overrideDerivation</varname> will
|
||||
not work in this case, as it overrides only the attributes of the final
|
||||
derivation. It is for this reason that <varname>overrideAttrs</varname>
|
||||
should be preferred in (almost) all cases to
|
||||
<varname>overrideDerivation</varname>, i.e. to allow using
|
||||
<varname>stdenv.mkDerivation</varname> to process input arguments, as well
as the fact that it is easier to use (you can use the same attribute
names you see in your Nix code, instead of the ones generated, e.g.
<varname>buildInputs</varname> vs <varname>nativeBuildInputs</varname>),
and involves less typing.
|
||||
</para>
|
||||
</note>
|
||||
|
||||
</section>
|
||||
|
||||
|
||||
<section xml:id="sec-pkg-overrideDerivation">
|
||||
<title><pkg>.overrideDerivation</title>
|
||||
|
||||
<warning>
|
||||
<para>You should prefer <varname>overrideAttrs</varname> in almost all
|
||||
cases, see its documentation for the reasons why.
|
||||
<varname>overrideDerivation</varname> is not deprecated and will continue
|
||||
to work, but is less nice to use and does not have as many abilities as
|
||||
<varname>overrideAttrs</varname>.
|
||||
</para>
|
||||
</warning>
|
||||
|
||||
<warning>
|
||||
<para>Do not use this function in Nixpkgs as it evaluates a Derivation
|
||||
before modifying it, which breaks package abstraction and removes
|
||||
error-checking of function arguments. In addition, this
|
||||
evaluation-per-function application incurs a performance penalty,
|
||||
which can become a problem if many overrides are used.
|
||||
It is only intended for ad-hoc customisation, such as in
|
||||
<filename>~/.nixpkgs/config.nix</filename>.
|
||||
</para>
|
||||
</warning>
|
||||
|
||||
<para>
|
||||
The function <varname>overrideDerivation</varname> creates a new derivation
|
||||
based on an existing one by overriding the original's attributes with
|
||||
the attribute set produced by the specified function.
|
||||
This function is available on all
|
||||
derivations defined using the <varname>makeOverridable</varname> function.
|
||||
Most standard derivation-producing functions, such as
|
||||
<varname>stdenv.mkDerivation</varname>, are defined using this
|
||||
function, which means most packages in the nixpkgs expression,
|
||||
<varname>pkgs</varname>, have this function.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
Example usage:
|
||||
|
||||
<programlisting>mySed = pkgs.gnused.overrideDerivation (oldAttrs: {
|
||||
name = "sed-4.2.2-pre";
|
||||
src = fetchurl {
|
||||
url = ftp://alpha.gnu.org/gnu/sed/sed-4.2.2-pre.tar.bz2;
|
||||
sha256 = "11nq06d131y4wmf3drm0yk502d2xc6n5qy82cg88rb9nqd2lj41k";
|
||||
};
|
||||
patches = [];
|
||||
});</programlisting>
|
||||
</para>
|
||||
|
||||
<para>
|
||||
In the above example, the <varname>name</varname>, <varname>src</varname>,
|
||||
and <varname>patches</varname> of the derivation will be overridden, while
|
||||
all other attributes will be retained from the original derivation.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
The argument <varname>oldAttrs</varname> is used to refer to the attribute set of
|
||||
the original derivation.
|
||||
</para>
|
||||
|
||||
<note>
|
||||
<para>
|
||||
A package's attributes are evaluated <emphasis>before</emphasis> being modified by
the <varname>overrideDerivation</varname> function.
For example, the <varname>name</varname> attribute reference
in <varname>url = "mirror://gnu/hello/${name}.tar.gz";</varname>
is filled-in <emphasis>before</emphasis> the <varname>overrideDerivation</varname> function
modifies the attribute set. This means that overriding the
<varname>name</varname> attribute, in this example, <emphasis>will not</emphasis> change the
value of the <varname>url</varname> attribute. Instead, we need to override
both the <varname>name</varname> <emphasis>and</emphasis> <varname>url</varname> attributes.
|
||||
</para>
|
||||
</note>
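<para>A minimal sketch of overriding both attributes together (the package
and the URL pattern follow the illustration above and are not taken from an
actual derivation):

<programlisting>myHello = pkgs.hello.overrideDerivation (oldAttrs: rec {
  name = "hello-2.10-pre";
  # Re-point the url at the new name; the original url was computed
  # from the old name before the override was applied.
  url = "mirror://gnu/hello/${name}.tar.gz";
});</programlisting>
</para>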
</section>
|
||||
|
||||
<section xml:id="sec-lib-makeOverridable">
|
||||
<title>lib.makeOverridable</title>
|
||||
|
||||
<para>
|
||||
The function <varname>lib.makeOverridable</varname> is used to make the result
|
||||
of a function easily customizable. This utility only makes sense for functions
|
||||
that accept an argument set and return an attribute set.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
Example usage:
|
||||
|
||||
<programlisting>f = { a, b }: { result = a+b; }
|
||||
c = lib.makeOverridable f { a = 1; b = 2; }</programlisting>
|
||||
|
||||
</para>
|
||||
|
||||
<para>
|
||||
The variable <varname>c</varname> is the value of the <varname>f</varname> function
|
||||
applied with some default arguments. Hence the value of <varname>c.result</varname>
|
||||
is <literal>3</literal>, in this example.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
The variable <varname>c</varname> however also has some additional functions, like
|
||||
<link linkend="sec-pkg-override">c.override</link> which can be used to
|
||||
override the default arguments. In this example the value of
|
||||
<varname>(c.override { a = 4; }).result</varname> is 6.
|
||||
</para>
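<para>Putting the pieces above together, a short sketch of the values involved:

<programlisting>f = { a, b }: { result = a + b; }
c = lib.makeOverridable f { a = 1; b = 2; }

c.result                        # 3
(c.override { a = 4; }).result  # 6</programlisting>
</para>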
</section>
|
||||
|
||||
</section>
|
||||
|
||||
<section xml:id="sec-generators">
|
||||
<title>Generators</title>
|
||||
|
||||
<para>
|
||||
Generators are functions that create file formats from nix
|
||||
data structures, e. g. for configuration files.
|
||||
There are generators available for: <literal>INI</literal>,
|
||||
<literal>JSON</literal> and <literal>YAML</literal>
|
||||
</para>
|
||||
|
||||
<para>
|
||||
All generators follow a similar call interface: <code>generatorName
|
||||
configFunctions data</code>, where <literal>configFunctions</literal> is a
|
||||
set of user-defined functions that format variable parts of the content.
|
||||
They each have common defaults, so often they do not need to be set
|
||||
manually. An example is <code>mkSectionName ? (name: libStr.escape [ "[" "]"
|
||||
] name)</code> from the <literal>INI</literal> generator. It gets the name
|
||||
of a section and returns a sanitized name. The default
|
||||
<literal>mkSectionName</literal> escapes <literal>[</literal> and
|
||||
<literal>]</literal> with a backslash.
|
||||
</para>
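<para>As an illustration, a minimal sketch using the <literal>INI</literal>
generator with its default <literal>configFunctions</literal> (the section
and key names are made up):

<programlisting>lib.generators.toINI {} {
  mysection = {
    key = "value";
    path = "${pkgs.hello}";
  };
}</programlisting>

This evaluates to a string containing an INI section named
<literal>mysection</literal> with the two key/value pairs.
</para>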
<note><para>Nix store paths can be converted to strings by enclosing a
|
||||
derivation attribute like so: <code>"${drv}"</code>.</para></note>
|
||||
|
||||
<para>
|
||||
Detailed documentation for each generator can be found in
|
||||
<literal>lib/generators.nix</literal>.
|
||||
</para>
|
||||
|
||||
</section>
|
||||
|
||||
|
||||
<section xml:id="sec-fhs-environments">
|
||||
<title>buildFHSUserEnv</title>
|
||||
|
||||
<para>
|
||||
<function>buildFHSUserEnv</function> provides a way to build and run
|
||||
FHS-compatible lightweight sandboxes. It creates an isolated root with
|
||||
bound <filename>/nix/store</filename>, so its footprint in terms of disk
|
||||
space needed is quite small. This allows one to run software which is hard or
|
||||
unfeasible to patch for NixOS -- 3rd-party source trees with FHS assumptions,
|
||||
games distributed as tarballs, software with integrity checking and/or external
|
||||
self-updated binaries. It uses the Linux namespaces feature to create
temporary lightweight environments which are destroyed after all child
processes exit, without requiring root privileges. Accepted arguments are:
|
||||
</para>
|
||||
|
||||
<variablelist>
|
||||
<varlistentry>
|
||||
<term><literal>name</literal></term>
|
||||
|
||||
<listitem><para>Environment name.</para></listitem>
|
||||
</varlistentry>
|
||||
|
||||
<varlistentry>
|
||||
<term><literal>targetPkgs</literal></term>
|
||||
|
||||
<listitem><para>Packages to be installed for the main host's architecture
|
||||
(i.e. x86_64 on x86_64 installations). Along with libraries, binaries are also
|
||||
installed.</para></listitem>
|
||||
</varlistentry>
|
||||
|
||||
<varlistentry>
|
||||
<term><literal>multiPkgs</literal></term>
|
||||
|
||||
<listitem><para>Packages to be installed for all architectures supported by
|
||||
a host (i.e. i686 and x86_64 on x86_64 installations). Only libraries are
|
||||
installed by default.</para></listitem>
|
||||
</varlistentry>
|
||||
|
||||
<varlistentry>
|
||||
<term><literal>extraBuildCommands</literal></term>
|
||||
|
||||
<listitem><para>Additional commands to be executed for finalizing the
|
||||
directory structure.</para></listitem>
|
||||
</varlistentry>
|
||||
|
||||
<varlistentry>
|
||||
<term><literal>extraBuildCommandsMulti</literal></term>
|
||||
|
||||
<listitem><para>Like <literal>extraBuildCommands</literal>, but
|
||||
executed only on multilib architectures.</para></listitem>
|
||||
</varlistentry>
|
||||
|
||||
<varlistentry>
|
||||
<term><literal>extraOutputsToInstall</literal></term>
|
||||
|
||||
<listitem><para>Additional derivation outputs to be linked for both
|
||||
target and multi-architecture packages.</para></listitem>
|
||||
</varlistentry>
|
||||
|
||||
<varlistentry>
|
||||
<term><literal>extraInstallCommands</literal></term>
|
||||
|
||||
<listitem><para>Additional commands to be executed for finalizing the
|
||||
derivation with runner script.</para></listitem>
|
||||
</varlistentry>
|
||||
|
||||
<varlistentry>
|
||||
<term><literal>runScript</literal></term>
|
||||
|
||||
<listitem><para>A command that would be executed inside the sandbox and
|
||||
passed all the command line arguments. It defaults to
|
||||
<literal>bash</literal>.</para></listitem>
|
||||
</varlistentry>
|
||||
</variablelist>
|
||||
|
||||
<para>
|
||||
One can create a simple environment using a <literal>shell.nix</literal>
|
||||
like that:
|
||||
</para>
|
||||
|
||||
<programlisting><![CDATA[
|
||||
{ pkgs ? import <nixpkgs> {} }:
|
||||
|
||||
(pkgs.buildFHSUserEnv {
|
||||
name = "simple-x11-env";
|
||||
targetPkgs = pkgs: (with pkgs;
|
||||
[ udev
|
||||
alsaLib
|
||||
]) ++ (with pkgs.xorg;
|
||||
[ libX11
|
||||
libXcursor
|
||||
libXrandr
|
||||
]);
|
||||
multiPkgs = pkgs: (with pkgs;
|
||||
[ udev
|
||||
alsaLib
|
||||
]);
|
||||
runScript = "bash";
|
||||
}).env
|
||||
]]></programlisting>
|
||||
|
||||
<para>
|
||||
Running <literal>nix-shell</literal> would then drop you into a shell with
|
||||
these libraries and binaries available. You can use this to run
|
||||
closed-source applications which expect FHS structure without hassles:
|
||||
simply change <literal>runScript</literal> to the application path,
|
||||
e.g. <filename>./bin/start.sh</filename> -- relative paths are supported.
|
||||
</para>
|
||||
</section>
|
||||
|
||||
<section xml:id="sec-pkgs-dockerTools">
|
||||
<title>pkgs.dockerTools</title>
|
||||
|
||||
<para>
|
||||
<varname>pkgs.dockerTools</varname> is a set of functions for creating and
|
||||
manipulating Docker images according to the
|
||||
<link xlink:href="https://github.com/docker/docker/blob/master/image/spec/v1.md#docker-image-specification-v100">
|
||||
Docker Image Specification v1.0.0
|
||||
</link>. Docker itself is not used to perform any of the operations done by these
|
||||
functions.
|
||||
</para>
|
||||
|
||||
<warning>
|
||||
<para>
|
||||
The <varname>dockerTools</varname> API is unstable and may be subject to
|
||||
backwards-incompatible changes in the future.
|
||||
</para>
|
||||
</warning>
|
||||
|
||||
<section xml:id="ssec-pkgs-dockerTools-buildImage">
|
||||
<title>buildImage</title>
|
||||
|
||||
<para>
|
||||
This function is analogous to the <command>docker build</command> command,
|
||||
in that it can be used to build a Docker-compatible repository tarball containing
|
||||
a single image with one or multiple layers. As such, the result
|
||||
is suitable for being loaded in Docker with <command>docker load</command>.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
The parameters of <varname>buildImage</varname>, with representative example values, are
|
||||
described below:
|
||||
</para>
|
||||
|
||||
<example xml:id='ex-dockerTools-buildImage'><title>Docker build</title>
|
||||
<programlisting>
|
||||
buildImage {
|
||||
name = "redis"; <co xml:id='ex-dockerTools-buildImage-1' />
|
||||
tag = "latest"; <co xml:id='ex-dockerTools-buildImage-2' />
|
||||
|
||||
fromImage = someBaseImage; <co xml:id='ex-dockerTools-buildImage-3' />
|
||||
fromImageName = null; <co xml:id='ex-dockerTools-buildImage-4' />
|
||||
fromImageTag = "latest"; <co xml:id='ex-dockerTools-buildImage-5' />
|
||||
|
||||
contents = pkgs.redis; <co xml:id='ex-dockerTools-buildImage-6' />
|
||||
runAsRoot = '' <co xml:id='ex-dockerTools-buildImage-runAsRoot' />
|
||||
#!${stdenv.shell}
|
||||
mkdir -p /data
|
||||
'';
|
||||
|
||||
config = { <co xml:id='ex-dockerTools-buildImage-8' />
|
||||
Cmd = [ "/bin/redis-server" ];
|
||||
WorkingDir = "/data";
|
||||
Volumes = {
|
||||
"/data" = {};
|
||||
};
|
||||
};
|
||||
}
|
||||
</programlisting>
|
||||
</example>
|
||||
|
||||
<para>The above example will build a Docker image <literal>redis/latest</literal>
|
||||
from the given base image. Loading and running this image in Docker results in
|
||||
<literal>redis-server</literal> being started automatically.
|
||||
</para>
|
||||
|
||||
<calloutlist>
|
||||
<callout arearefs='ex-dockerTools-buildImage-1'>
|
||||
<para>
|
||||
<varname>name</varname> specifies the name of the resulting image.
|
||||
This is the only required argument for <varname>buildImage</varname>.
|
||||
</para>
|
||||
</callout>
|
||||
|
||||
<callout arearefs='ex-dockerTools-buildImage-2'>
|
||||
<para>
|
||||
<varname>tag</varname> specifies the tag of the resulting image.
|
||||
By default it's <literal>latest</literal>.
|
||||
</para>
|
||||
</callout>
|
||||
|
||||
<callout arearefs='ex-dockerTools-buildImage-3'>
|
||||
<para>
|
||||
<varname>fromImage</varname> is the repository tarball containing the base image.
|
||||
It must be a valid Docker image, such as exported by <command>docker save</command>.
|
||||
By default it's <literal>null</literal>, which can be seen as equivalent
|
||||
to <literal>FROM scratch</literal> of a <filename>Dockerfile</filename>.
|
||||
</para>
|
||||
</callout>
|
||||
|
||||
<callout arearefs='ex-dockerTools-buildImage-4'>
|
||||
<para>
|
||||
<varname>fromImageName</varname> can be used to further specify
|
||||
the base image within the repository, in case it contains multiple images.
|
||||
By default it's <literal>null</literal>, in which case
|
||||
<varname>buildImage</varname> will pick the first image available
|
||||
in the repository.
|
||||
</para>
|
||||
</callout>
|
||||
|
||||
<callout arearefs='ex-dockerTools-buildImage-5'>
|
||||
<para>
|
||||
<varname>fromImageTag</varname> can be used to further specify the tag
|
||||
of the base image within the repository, in case an image contains multiple tags.
|
||||
By default it's <literal>null</literal>, in which case
|
||||
<varname>buildImage</varname> will pick the first tag available for the base image.
|
||||
</para>
|
||||
</callout>
|
||||
|
||||
<callout arearefs='ex-dockerTools-buildImage-6'>
|
||||
<para>
|
||||
<varname>contents</varname> is a derivation that will be copied in the new
|
||||
layer of the resulting image. This can be similarly seen as
|
||||
<command>ADD contents/ /</command> in a <filename>Dockerfile</filename>.
|
||||
By default it's <literal>null</literal>.
|
||||
</para>
|
||||
</callout>
|
||||
|
||||
<callout arearefs='ex-dockerTools-buildImage-runAsRoot'>
|
||||
<para>
|
||||
<varname>runAsRoot</varname> is a bash script that will run as root
|
||||
in an environment that overlays the existing layers of the base image with
|
||||
the new resulting layer, including the previously copied
|
||||
<varname>contents</varname> derivation.
|
||||
This can be similarly seen as
|
||||
<command>RUN ...</command> in a <filename>Dockerfile</filename>.
|
||||
|
||||
<note>
|
||||
<para>
|
||||
Using this parameter requires the <literal>kvm</literal>
|
||||
device to be available.
|
||||
</para>
|
||||
</note>
|
||||
</para>
|
||||
</callout>
|
||||
|
||||
<callout arearefs='ex-dockerTools-buildImage-8'>
|
||||
<para>
|
||||
<varname>config</varname> is used to specify the configuration of the
|
||||
containers that will be started off the built image in Docker.
|
||||
The available options are listed in the
|
||||
<link xlink:href="https://github.com/docker/docker/blob/master/image/spec/v1.md#container-runconfig-field-descriptions">
|
||||
Docker Image Specification v1.0.0
|
||||
</link>.
|
||||
</para>
|
||||
</callout>
|
||||
|
||||
</calloutlist>
|
||||
|
||||
<para>
|
||||
After the new layer has been created, its closure
|
||||
(to which <varname>contents</varname>, <varname>config</varname> and
|
||||
<varname>runAsRoot</varname> contribute) will be copied in the layer itself.
|
||||
Only new dependencies that are not already in the existing layers will be copied.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
At the end of the process, only one new single layer will be produced and
|
||||
added to the resulting image.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
The resulting repository will only list the single image
|
||||
<varname>image/tag</varname>. In the case of <xref linkend='ex-dockerTools-buildImage'/>
|
||||
it would be <varname>redis/latest</varname>.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
It is possible to inspect the arguments with which an image was built
|
||||
using its <varname>buildArgs</varname> attribute.
|
||||
</para>
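<para>A small sketch of such an inspection (the argument values are the
ones from the example above):

<programlisting>redisImage = buildImage {
  name = "redis";
  tag = "latest";
};

# redisImage.buildArgs now contains the arguments given above,
# e.g. the name and tag.</programlisting>
</para>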
</section>
|
||||
|
||||
<section xml:id="ssec-pkgs-dockerTools-fetchFromRegistry">
|
||||
<title>pullImage</title>
|
||||
|
||||
<para>
|
||||
This function is analogous to the <command>docker pull</command> command,
|
||||
in that it can be used to fetch a Docker image from a Docker registry.
|
||||
Currently only registry <literal>v1</literal> is supported.
|
||||
By default <link xlink:href="https://hub.docker.com/">Docker Hub</link>
|
||||
is used to pull images.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
Its parameters are described in the example below:
|
||||
</para>
|
||||
|
||||
<example xml:id='ex-dockerTools-pullImage'><title>Docker pull</title>
|
||||
<programlisting>
|
||||
pullImage {
|
||||
imageName = "debian"; <co xml:id='ex-dockerTools-pullImage-1' />
|
||||
imageTag = "jessie"; <co xml:id='ex-dockerTools-pullImage-2' />
|
||||
imageId = null; <co xml:id='ex-dockerTools-pullImage-3' />
|
||||
sha256 = "1bhw5hkz6chrnrih0ymjbmn69hyfriza2lr550xyvpdrnbzr4gk2"; <co xml:id='ex-dockerTools-pullImage-4' />
|
||||
|
||||
indexUrl = "https://index.docker.io"; <co xml:id='ex-dockerTools-pullImage-5' />
|
||||
registryVersion = "v1";
|
||||
}
|
||||
</programlisting>
|
||||
</example>
|
||||
|
||||
<calloutlist>
|
||||
<callout arearefs='ex-dockerTools-pullImage-1'>
|
||||
<para>
|
||||
<varname>imageName</varname> specifies the name of the image to be downloaded,
|
||||
which can also include the registry namespace (e.g. <literal>library/debian</literal>).
|
||||
This argument is required.
|
||||
</para>
|
||||
</callout>
|
||||
|
||||
<callout arearefs='ex-dockerTools-pullImage-2'>
|
||||
<para>
|
||||
<varname>imageTag</varname> specifies the tag of the image to be downloaded.
|
||||
By default it's <literal>latest</literal>.
|
||||
</para>
|
||||
</callout>
|
||||
|
||||
<callout arearefs='ex-dockerTools-pullImage-3'>
|
||||
<para>
|
||||
<varname>imageId</varname>: if specified, this exact image will be fetched, instead
|
||||
of <varname>imageName/imageTag</varname>. However, the resulting repository
|
||||
will still be named <varname>imageName/imageTag</varname>.
|
||||
By default it's <literal>null</literal>.
|
||||
</para>
|
||||
</callout>
|
||||
|
||||
<callout arearefs='ex-dockerTools-pullImage-4'>
|
||||
<para>
|
||||
<varname>sha256</varname> is the checksum of the whole fetched image.
|
||||
This argument is required.
|
||||
</para>
|
||||
|
||||
<note>
|
||||
<para>The checksum is computed on the unpacked directory, not on the final tarball.</para>
|
||||
</note>
|
||||
|
||||
</callout>
|
||||
|
||||
<callout arearefs='ex-dockerTools-pullImage-5'>
|
||||
<para>
|
||||
In the above example the default values are shown for the variables
|
||||
<varname>indexUrl</varname> and <varname>registryVersion</varname>.
|
||||
Hence by default the Docker.io registry is used to pull the images.
|
||||
</para>
|
||||
</callout>
|
||||
</calloutlist>
|
||||
|
||||
</section>
|
||||
|
||||
<section xml:id="ssec-pkgs-dockerTools-exportImage">
|
||||
<title>exportImage</title>
|
||||
|
||||
<para>
|
||||
This function is analogous to the <command>docker export</command> command,
|
||||
in that it can be used to flatten a Docker image that contains multiple layers.
|
||||
It is in fact the result of the merge of all the layers of the image.
|
||||
As such, the result is suitable for being imported in Docker
|
||||
with <command>docker import</command>.
|
||||
</para>
|
||||
|
||||
<note>
|
||||
<para>
|
||||
Using this function requires the <literal>kvm</literal>
|
||||
device to be available.
|
||||
</para>
|
||||
</note>
|
||||
|
||||
<para>
|
||||
The parameters of <varname>exportImage</varname> are the following:
|
||||
</para>
|
||||
|
||||
<example xml:id='ex-dockerTools-exportImage'><title>Docker export</title>
|
||||
<programlisting>
|
||||
exportImage {
|
||||
fromImage = someLayeredImage;
|
||||
fromImageName = null;
|
||||
fromImageTag = null;
|
||||
|
||||
name = someLayeredImage.name;
|
||||
}
|
||||
</programlisting>
|
||||
</example>
|
||||
|
||||
<para>
|
||||
The parameters relative to the base image have the same synopsis as
|
||||
described in <xref linkend='ssec-pkgs-dockerTools-buildImage'/>, except that
|
||||
<varname>fromImage</varname> is the only required argument in this case.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
The <varname>name</varname> argument is the name of the derivation output,
|
||||
which defaults to <varname>fromImage.name</varname>.
|
||||
</para>
|
||||
</section>
|
||||
|
||||
<section xml:id="ssec-pkgs-dockerTools-shadowSetup">
|
||||
<title>shadowSetup</title>
|
||||
|
||||
<para>
|
||||
This constant string is a helper for setting up the base files for managing
|
||||
users and groups, only if such files don't exist already.
|
||||
It is suitable for being used in a
|
||||
<varname>runAsRoot</varname> <xref linkend='ex-dockerTools-buildImage-runAsRoot'/> script for cases like
|
||||
in the example below:
|
||||
</para>
|
||||
|
||||
<example xml:id='ex-dockerTools-shadowSetup'><title>Shadow base files</title>
|
||||
<programlisting>
|
||||
buildImage {
|
||||
name = "shadow-basic";
|
||||
|
||||
runAsRoot = ''
|
||||
#!${stdenv.shell}
|
||||
${shadowSetup}
|
||||
groupadd -r redis
|
||||
useradd -r -g redis redis
|
||||
mkdir /data
|
||||
chown redis:redis /data
|
||||
'';
|
||||
}
|
||||
</programlisting>
|
||||
</example>
|
||||
|
||||
<para>
|
||||
Creating base files like <literal>/etc/passwd</literal> or
|
||||
<literal>/etc/login.defs</literal> is necessary for shadow-utils to
|
||||
manipulate users and groups.
|
||||
</para>
|
||||
|
||||
</section>
|
||||
|
||||
</section>
|
||||
|
||||
</chapter>
|
||||
@@ -1,53 +0,0 @@
|
||||
---
|
||||
title: Introduction
|
||||
author: Frederik Rietdijk
|
||||
date: 2015-11-25
|
||||
---
|
||||
|
||||
# Introduction
|
||||
|
||||
The Nix Packages collection (Nixpkgs) is a set of thousands of packages for the
|
||||
[Nix package manager](http://nixos.org/nix/), released under a
|
||||
[permissive MIT/X11 license](https://github.com/NixOS/nixpkgs/blob/master/COPYING).
|
||||
Packages are available for several platforms, and can be used with the Nix
|
||||
package manager on most GNU/Linux distributions as well as NixOS.
|
||||
|
||||
This manual primarily describes how to write packages for the Nix Packages collection
|
||||
(Nixpkgs). Thus it’s mainly for packagers and developers who want to add packages to
|
||||
Nixpkgs. If you like to learn more about the Nix package manager and the Nix
|
||||
expression language, then you are kindly referred to the [Nix manual](http://nixos.org/nix/manual/).
|
||||
|
||||
## Overview of Nixpkgs
|
||||
|
||||
Nix expressions describe how to build packages from source and are collected in
|
||||
the [nixpkgs repository](https://github.com/NixOS/nixpkgs). Also included in the
|
||||
collection are Nix expressions for
|
||||
[NixOS modules](http://nixos.org/nixos/manual/index.html#sec-writing-modules).
|
||||
With these expressions the Nix package manager can build binary packages.
|
||||
|
||||
Packages, including the Nix packages collection, are distributed through
|
||||
[channels](http://nixos.org/nix/manual/#sec-channels). The collection is
|
||||
distributed for users of Nix on non-NixOS distributions through the channel
|
||||
`nixpkgs`. Users of NixOS generally use one of the `nixos-*` channels, e.g.
|
||||
`nixos-16.03`, which includes all packages and modules for the stable NixOS
|
||||
16.03. Stable NixOS releases are generally only given
security updates. More up-to-date packages and modules are available via the
`nixos-unstable` channel.
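As a sketch, subscribing a machine to one of these channels and updating it might look like this (the channel URL is the one published on nixos.org):

```
$ nix-channel --add https://nixos.org/channels/nixos-unstable nixos
$ nix-channel --update
```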
Both `nixos-unstable` and `nixpkgs` follow the `master` branch of the Nixpkgs
|
||||
repository, although both do lag the `master` branch by generally
|
||||
[a couple of days](http://howoldis.herokuapp.com/). Updates to a channel are
|
||||
distributed as soon as all tests for that channel pass, e.g.
|
||||
[this table](http://hydra.nixos.org/job/nixpkgs/trunk/unstable#tabs-constituents)
|
||||
shows the status of tests for the `nixpkgs` channel.
|
||||
|
||||
The tests are conducted by a cluster called [Hydra](http://nixos.org/hydra/),
|
||||
which also builds binary packages from the Nix expressions in Nixpkgs for
|
||||
`x86_64-linux`, `i686-linux` and `x86_64-darwin`.
|
||||
The binaries are made available via a [binary cache](https://cache.nixos.org).
|
||||
|
||||
The current Nix expressions of the channels are available in the
|
||||
[`nixpkgs-channels`](https://github.com/NixOS/nixpkgs-channels) repository,
|
||||
which has branches corresponding to the available channels. There is also the
|
||||
[Nixpkgs Monitor](http://monitor.nixos.org) which keeps track of updates
|
||||
and security vulnerabilities.
|
||||
doc/introduction.xml
@@ -0,0 +1,21 @@
|
||||
<chapter xmlns="http://docbook.org/ns/docbook"
|
||||
xmlns:xlink="http://www.w3.org/1999/xlink"
|
||||
xml:id="chap-introduction">
|
||||
|
||||
<title>Introduction</title>
|
||||
|
||||
<para>This manual tells you how to write packages for the Nix Packages
|
||||
collection (Nixpkgs). Thus it’s for packagers and developers who want
|
||||
to add packages to Nixpkgs. End users are kindly referred to the
|
||||
<link xlink:href="http://hydra.nixos.org/job/nix/trunk/tarball/latest/download-by-type/doc/manual">Nix
|
||||
manual</link>.</para>
|
||||
|
||||
<para>This manual does not describe the syntax and semantics of the
|
||||
Nix expression language, which are given in the Nix manual in the
|
||||
<link
|
||||
xlink:href="http://hydra.nixos.org/job/nix/trunk/tarball/latest/download-by-type/doc/manual/#chap-writing-nix-expressions">chapter
|
||||
on writing Nix expressions</link>. It only describes the facilities
|
||||
provided by Nixpkgs to make writing packages easier, such as the
|
||||
standard build environment (<literal>stdenv</literal>).</para>
|
||||
|
||||
</chapter>
|
||||
812
doc/language-support.xml
Normal file
@@ -0,0 +1,812 @@
|
||||
<chapter xmlns="http://docbook.org/ns/docbook"
|
||||
xmlns:xlink="http://www.w3.org/1999/xlink"
|
||||
xml:id="chap-language-support">
|
||||
|
||||
<title>Support for specific programming languages</title>
|
||||
|
||||
<para>The <link linkend="chap-stdenv">standard build
|
||||
environment</link> makes it easy to build typical Autotools-based
|
||||
packages with very little code. Any other kind of package can be
|
||||
accommodated by overriding the appropriate phases of
|
||||
<literal>stdenv</literal>. However, there are specialised functions
|
||||
in Nixpkgs to easily build packages for other programming languages,
|
||||
such as Perl or Haskell. These are described in this chapter.</para>
|
||||
|
||||
|
||||
<section xml:id="ssec-language-perl"><title>Perl</title>
|
||||
|
||||
<para>Nixpkgs provides a function <varname>buildPerlPackage</varname>,
|
||||
a generic package builder function for any Perl package that has a
|
||||
standard <varname>Makefile.PL</varname>. It’s implemented in <link
|
||||
xlink:href="https://github.com/NixOS/nixpkgs/blob/master/pkgs/development/perl-modules/generic"><filename>pkgs/development/perl-modules/generic</filename></link>.</para>
|
||||
|
||||
<para>Perl packages from CPAN are defined in <link
|
||||
xlink:href="https://github.com/NixOS/nixpkgs/blob/master/pkgs/top-level/perl-packages.nix"><filename>pkgs/top-level/perl-packages.nix</filename></link>,
|
||||
rather than <filename>pkgs/all-packages.nix</filename>. Most Perl
|
||||
packages are so straightforward to build that they are defined here
|
||||
directly, rather than having a separate function for each package
|
||||
called from <filename>perl-packages.nix</filename>. However, more
|
||||
complicated packages should be put in a separate file, typically in
|
||||
<filename>pkgs/development/perl-modules</filename>. Here is an
|
||||
example of the former:
|
||||
|
||||
<programlisting>
|
||||
ClassC3 = buildPerlPackage rec {
|
||||
name = "Class-C3-0.21";
|
||||
src = fetchurl {
|
||||
url = "mirror://cpan/authors/id/F/FL/FLORA/${name}.tar.gz";
|
||||
sha256 = "1bl8z095y4js66pwxnm7s853pi9czala4sqc743fdlnk27kq94gz";
|
||||
};
|
||||
};
|
||||
</programlisting>
|
||||
|
||||
Note the use of <literal>mirror://cpan/</literal>, and the
|
||||
<literal>${name}</literal> in the URL definition to ensure that the
|
||||
name attribute is consistent with the source that we’re actually
|
||||
downloading. Perl packages are made available in
|
||||
<filename>all-packages.nix</filename> through the variable
|
||||
<varname>perlPackages</varname>. For instance, if you have a package
|
||||
that needs <varname>ClassC3</varname>, you would typically write
|
||||
|
||||
<programlisting>
|
||||
foo = import ../path/to/foo.nix {
|
||||
inherit stdenv fetchurl ...;
|
||||
inherit (perlPackages) ClassC3;
|
||||
};
|
||||
</programlisting>
|
||||
|
||||
in <filename>all-packages.nix</filename>. You can test building a
|
||||
Perl package as follows:
|
||||
|
||||
<screen>
|
||||
$ nix-build -A perlPackages.ClassC3
|
||||
</screen>
|
||||
|
||||
<varname>buildPerlPackage</varname> adds <literal>perl-</literal> to
|
||||
the start of the name attribute, so the package above is actually
|
||||
called <literal>perl-Class-C3-0.21</literal>. So to install it, you
|
||||
can say:
|
||||
|
||||
<screen>
|
||||
$ nix-env -i perl-Class-C3
|
||||
</screen>
|
||||
|
||||
(Of course you can also install using the attribute name:
|
||||
<literal>nix-env -i -A perlPackages.ClassC3</literal>.)</para>
|
||||
|
||||
<para>So what does <varname>buildPerlPackage</varname> do? It does
|
||||
the following:
|
||||
|
||||
<orderedlist>
|
||||
|
||||
<listitem><para>In the configure phase, it calls <literal>perl
|
||||
Makefile.PL</literal> to generate a Makefile. You can set the
variable <varname>makeMakerFlags</varname> to pass flags to
<filename>Makefile.PL</filename> (see the sketch below).</para></listitem>
|
||||
|
||||
<listitem><para>It adds the contents of the <envar>PERL5LIB</envar>
|
||||
environment variable to the <literal>#! .../bin/perl</literal> line of
|
||||
Perl scripts as <literal>-I<replaceable>dir</replaceable></literal>
|
||||
flags. This ensures that a script can find its
|
||||
dependencies.</para></listitem>
|
||||
|
||||
<listitem><para>In the fixup phase, it writes the propagated build
|
||||
inputs (<varname>propagatedBuildInputs</varname>) to the file
|
||||
<filename>$out/nix-support/propagated-user-env-packages</filename>.
|
||||
<command>nix-env</command> recursively installs all packages listed
|
||||
in this file when you install a package that has it. This ensures
|
||||
that a Perl package can find its dependencies.</para></listitem>
|
||||
|
||||
</orderedlist>
|
||||
|
||||
</para>
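
<para>As a minimal sketch (the package name and the flag value are
hypothetical), passing flags to <filename>Makefile.PL</filename> through
<varname>makeMakerFlags</varname> looks like this:

<programlisting>
FooBar = buildPerlPackage rec {
  name = "Foo-Bar-1.0";
  src = fetchurl { ... };
  # hypothetical flags forwarded to "perl Makefile.PL" in the configure phase
  makeMakerFlags = "POLLUTE=1";
};
</programlisting>
</para>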
|
||||
|
||||
<para><varname>buildPerlPackage</varname> is built on top of
|
||||
<varname>stdenv</varname>, so everything can be customised in the
|
||||
usual way. For instance, the <literal>BerkeleyDB</literal> module has
|
||||
a <varname>preConfigure</varname> hook to generate a configuration
|
||||
file used by <filename>Makefile.PL</filename>:
|
||||
|
||||
<programlisting>
|
||||
{ buildPerlPackage, fetchurl, db }:
|
||||
|
||||
buildPerlPackage rec {
|
||||
name = "BerkeleyDB-0.36";
|
||||
|
||||
src = fetchurl {
|
||||
url = "mirror://cpan/authors/id/P/PM/PMQS/${name}.tar.gz";
|
||||
sha256 = "07xf50riarb60l1h6m2dqmql8q5dij619712fsgw7ach04d8g3z1";
|
||||
};
|
||||
|
||||
preConfigure = ''
|
||||
echo "LIB = ${db}/lib" > config.in
|
||||
echo "INCLUDE = ${db}/include" >> config.in
|
||||
'';
|
||||
}
|
||||
</programlisting>
|
||||
|
||||
</para>
|
||||
|
||||
<para>Dependencies on other Perl packages can be specified in the
|
||||
<varname>buildInputs</varname> and
|
||||
<varname>propagatedBuildInputs</varname> attributes. If something is
|
||||
exclusively a build-time dependency, use
|
||||
<varname>buildInputs</varname>; if it’s (also) a runtime dependency,
|
||||
use <varname>propagatedBuildInputs</varname>. For instance, this
|
||||
builds a Perl module that has runtime dependencies on a bunch of other
|
||||
modules:
|
||||
|
||||
<programlisting>
|
||||
ClassC3Componentised = buildPerlPackage rec {
|
||||
name = "Class-C3-Componentised-1.0004";
|
||||
src = fetchurl {
|
||||
url = "mirror://cpan/authors/id/A/AS/ASH/${name}.tar.gz";
|
||||
sha256 = "0xql73jkcdbq4q9m0b0rnca6nrlvf5hyzy8is0crdk65bynvs8q1";
|
||||
};
|
||||
propagatedBuildInputs = [
|
||||
ClassC3 ClassInspector TestException MROCompat
|
||||
];
|
||||
};
|
||||
</programlisting>
|
||||
|
||||
</para>
|
||||
|
||||
<section><title>Generation from CPAN</title>
|
||||
|
||||
<para>Nix expressions for Perl packages can be generated (almost)
|
||||
automatically from CPAN. This is done by the program
|
||||
<command>nix-generate-from-cpan</command>, which can be installed
|
||||
as follows:</para>
|
||||
|
||||
<screen>
|
||||
$ nix-env -i nix-generate-from-cpan
|
||||
</screen>
|
||||
|
||||
<para>This program takes a Perl module name, looks it up on CPAN,
|
||||
fetches and unpacks the corresponding package, and prints a Nix
|
||||
expression on standard output. For example:
|
||||
|
||||
<screen>
|
||||
$ nix-generate-from-cpan XML::Simple
|
||||
XMLSimple = buildPerlPackage {
|
||||
name = "XML-Simple-2.20";
|
||||
src = fetchurl {
|
||||
url = mirror://cpan/authors/id/G/GR/GRANTM/XML-Simple-2.20.tar.gz;
|
||||
sha256 = "5cff13d0802792da1eb45895ce1be461903d98ec97c9c953bc8406af7294434a";
|
||||
};
|
||||
propagatedBuildInputs = [ XMLNamespaceSupport XMLSAX XMLSAXExpat ];
|
||||
meta = {
|
||||
description = "Easily read/write XML (esp config files)";
|
||||
license = "perl";
|
||||
};
|
||||
};
|
||||
</screen>
|
||||
|
||||
The output can be pasted into
|
||||
<filename>pkgs/top-level/perl-packages.nix</filename> or wherever else
|
||||
you need it.</para>
|
||||
|
||||
</section>
|
||||
|
||||
</section>
|
||||
|
||||
|
||||
<section xml:id="python"><title>Python</title>
|
||||
|
||||
<para>
|
||||
Currently supported interpreters are <varname>python26</varname>, <varname>python27</varname>,
|
||||
<varname>python32</varname>, <varname>python33</varname>, <varname>python34</varname>
|
||||
and <varname>pypy</varname>.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
<varname>python</varname> is an alias of <varname>python27</varname> and <varname>python3</varname> is an alias of <varname>python34</varname>.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
<varname>python26</varname> and <varname>python27</varname> do not include modules that require
|
||||
external dependencies (to reduce dependency bloat). The following modules need to be added as
<varname>buildInputs</varname> explicitly:
|
||||
</para>
|
||||
|
||||
<itemizedlist>
|
||||
<listitem><para><varname>python.modules.bsddb</varname></para></listitem>
|
||||
<listitem><para><varname>python.modules.curses</varname></para></listitem>
|
||||
<listitem><para><varname>python.modules.curses_panel</varname></para></listitem>
|
||||
<listitem><para><varname>python.modules.crypt</varname></para></listitem>
|
||||
<listitem><para><varname>python.modules.gdbm</varname></para></listitem>
|
||||
<listitem><para><varname>python.modules.sqlite3</varname></para></listitem>
|
||||
<listitem><para><varname>python.modules.tkinter</varname></para></listitem>
|
||||
<listitem><para><varname>python.modules.readline</varname></para></listitem>
|
||||
</itemizedlist>
|
||||
|
||||
<para>For convenience <varname>python27Full</varname> and <varname>python26Full</varname>
|
||||
are provided with all modules included.</para>
|
||||
|
||||
<para>
|
||||
Python packages that
|
||||
use <link xlink:href="http://pypi.python.org/pypi/setuptools/"><literal>setuptools</literal></link> or <literal>distutils</literal>,
|
||||
can be built using the <varname>buildPythonPackage</varname> function as documented below.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
All packages depending on any Python interpreter get <varname>$out/${python.libPrefix}/site-packages</varname>
appended to <literal>$PYTHONPATH</literal> if such a directory exists.
|
||||
</para>
|
||||
|
||||
<variablelist>
|
||||
<title>
|
||||
Useful attributes on interpreter packages (see also the sketch after this list):
|
||||
</title>
|
||||
|
||||
<varlistentry>
|
||||
<term><varname>libPrefix</varname></term>
|
||||
<listitem><para>
|
||||
Name of the folder in <literal>${python}/lib/</literal> for corresponding interpreter.
|
||||
</para></listitem>
|
||||
</varlistentry>
|
||||
|
||||
<varlistentry>
|
||||
<term><varname>interpreter</varname></term>
|
||||
<listitem><para>
|
||||
Alias for <literal>${python}/bin/${executable}</literal>.
|
||||
</para></listitem>
|
||||
</varlistentry>
|
||||
|
||||
<varlistentry>
|
||||
<term><varname>buildEnv</varname></term>
|
||||
<listitem><para>
|
||||
Function to build python interpreter environments with extra packages bundled together.
|
||||
See <xref linkend="python-build-env" /> for usage and documentation.
|
||||
</para></listitem>
|
||||
</varlistentry>
|
||||
|
||||
<varlistentry>
|
||||
<term><varname>sitePackages</varname></term>
|
||||
<listitem><para>
|
||||
Alias for <literal>lib/${libPrefix}/site-packages</literal>.
|
||||
</para></listitem>
|
||||
</varlistentry>
|
||||
|
||||
<varlistentry>
|
||||
<term><varname>executable</varname></term>
|
||||
<listitem><para>
|
||||
Name of the interpreter executable, e.g. <literal>python3.4</literal>.
|
||||
</para></listitem>
|
||||
</varlistentry>
|
||||
|
||||
</variablelist>
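
<para>As a rough, hypothetical sketch of how these attributes are typically
used, a derivation might refer to the chosen interpreter and its
site-packages directory like this:

<programlisting>
stdenv.mkDerivation {
  name = "foo-frontend-1.0";
  src = fetchurl { ... };
  buildInputs = [ python ];
  # run a helper script with the interpreter of the chosen python package
  buildPhase = ''
    ${python.interpreter} generate.py
  '';
  # install a module into this interpreter's site-packages directory
  installPhase = ''
    mkdir -p $out/${python.sitePackages}
    cp foo.py $out/${python.sitePackages}/
  '';
}
</programlisting>
</para>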
|
||||
<section xml:id="build-python-package"><title><varname>buildPythonPackage</varname> function</title>
|
||||
|
||||
<para>
|
||||
The function is implemented in <link xlink:href="https://github.com/NixOS/nixpkgs/blob/master/pkgs/development/python-modules/generic/default.nix">
|
||||
<filename>pkgs/development/python-modules/generic/default.nix</filename></link>.
|
||||
Example usage:
|
||||
|
||||
<programlisting language="nix">
|
||||
twisted = buildPythonPackage {
|
||||
name = "twisted-8.1.0";
|
||||
|
||||
src = pkgs.fetchurl {
|
||||
url = http://tmrc.mit.edu/mirror/twisted/Twisted/8.1/Twisted-8.1.0.tar.bz2;
|
||||
sha256 = "0q25zbr4xzknaghha72mq57kh53qw1bf8csgp63pm9sfi72qhirl";
|
||||
};
|
||||
|
||||
propagatedBuildInputs = [ self.ZopeInterface ];
|
||||
|
||||
meta = {
|
||||
homepage = http://twistedmatrix.com/;
|
||||
description = "Twisted, an event-driven networking engine written in Python";
|
||||
license = stdenv.lib.licenses.mit;
|
||||
};
|
||||
};
|
||||
</programlisting>
|
||||
|
||||
Most Python packages that use <varname>buildPythonPackage</varname> are defined
|
||||
in <link xlink:href="https://github.com/NixOS/nixpkgs/blob/master/pkgs/top-level/python-packages.nix"><filename>pkgs/top-level/python-packages.nix</filename></link>
|
||||
and generated for each python interpreter separately into attribute sets <varname>python26Packages</varname>,
|
||||
<varname>python27Packages</varname>, <varname>python32Packages</varname>, <varname>python33Packages</varname>,
|
||||
<varname>python34Packages</varname> and <varname>pypyPackages</varname>.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
<function>buildPythonPackage</function> mainly does four things:
|
||||
|
||||
<orderedlist>
|
||||
<listitem><para>
|
||||
In the <varname>configurePhase</varname>, it patches
|
||||
<literal>setup.py</literal> to always include setuptools before
|
||||
distutils so that the setuptools monkeypatching machinery takes effect.
|
||||
</para></listitem>
|
||||
|
||||
<listitem><para>
|
||||
In the <varname>buildPhase</varname>, it calls
|
||||
<literal>${python.interpreter} setup.py build ...</literal>
|
||||
</para></listitem>
|
||||
|
||||
<listitem><para>
|
||||
In the <varname>installPhase</varname>, it calls
|
||||
<literal>${python.interpreter} setup.py install ...</literal>
|
||||
</para></listitem>
|
||||
|
||||
<listitem><para>
|
||||
In the <varname>postFixup</varname> phase, the <literal>wrapPythonPrograms</literal>
bash function is called to wrap all programs in the <filename>$out/bin/*</filename>
directory so that they include <literal>$PYTHONPATH</literal> and <literal>$PATH</literal>
|
||||
environment variables.
|
||||
</para></listitem>
|
||||
</orderedlist>
|
||||
</para>
|
||||
|
||||
<para>By default <varname>doCheck = true</varname> is set and tests are run with
|
||||
<literal>${python.interpreter} setup.py test</literal> command in <varname>checkPhase</varname>.</para>
|
||||
|
||||
<para><varname>propagatedBuildInputs</varname> packages are propagated to the user environment.</para>
|
||||
|
||||
<para>
|
||||
By default <varname>meta.platforms</varname> is set to the same value
|
||||
as the interpreter, unless overridden.
|
||||
</para>
|
||||
|
||||
<variablelist>
|
||||
<title>
|
||||
<varname>buildPythonPackage</varname> parameters
|
||||
(all parameters from <varname>mkDerivation</varname> function are still supported)
|
||||
</title>
|
||||
|
||||
<varlistentry>
|
||||
<term><varname>namePrefix</varname></term>
|
||||
<listitem><para>
|
||||
Text prepended to the <varname>${name}</varname> parameter.
|
||||
Defaults to <literal>"python3.3-"</literal> for Python 3.3, etc. Set it to
|
||||
<literal>""</literal>
|
||||
if you're packaging an application or a command line tool.
|
||||
</para></listitem>
|
||||
</varlistentry>
|
||||
|
||||
<varlistentry>
|
||||
<term><varname>disabled</varname></term>
|
||||
<listitem><para>
|
||||
If <varname>true</varname>, the package is not built for
that particular Python interpreter version (see also the sketch after this list). Grep around
|
||||
<filename>pkgs/top-level/python-packages.nix</filename>
|
||||
for examples.
|
||||
</para></listitem>
|
||||
</varlistentry>
|
||||
|
||||
<varlistentry>
|
||||
<term><varname>setupPyInstallFlags</varname></term>
|
||||
<listitem><para>
|
||||
List of flags passed to <command>setup.py install</command> command.
|
||||
</para></listitem>
|
||||
</varlistentry>
|
||||
|
||||
<varlistentry>
|
||||
<term><varname>setupPyBuildFlags</varname></term>
|
||||
<listitem><para>
|
||||
List of flags passed to <command>setup.py build</command> command.
|
||||
</para></listitem>
|
||||
</varlistentry>
|
||||
|
||||
<varlistentry>
|
||||
<term><varname>pythonPath</varname></term>
|
||||
<listitem><para>
|
||||
List of packages to be added into <literal>$PYTHONPATH</literal>.
|
||||
Packages in <varname>pythonPath</varname> are not propagated into user environment
|
||||
(contrary to <varname>propagatedBuildInputs</varname>).
|
||||
</para></listitem>
|
||||
</varlistentry>
|
||||
|
||||
<varlistentry>
|
||||
<term><varname>preShellHook</varname></term>
|
||||
<listitem><para>
|
||||
Hook to execute commands before <varname>shellHook</varname>.
|
||||
</para></listitem>
|
||||
</varlistentry>
|
||||
|
||||
<varlistentry>
|
||||
<term><varname>postShellHook</varname></term>
|
||||
<listitem><para>
|
||||
Hook to execute commands after <varname>shellHook</varname>.
|
||||
</para></listitem>
|
||||
</varlistentry>
|
||||
|
||||
<varlistentry>
|
||||
<term><varname>distutilsExtraCfg</varname></term>
|
||||
<listitem><para>
|
||||
Extra lines passed to <varname>[easy_install]</varname> section of
|
||||
<filename>distutils.cfg</filename> (acts as global setup.cfg
|
||||
configuration).
|
||||
</para></listitem>
|
||||
</varlistentry>
|
||||
|
||||
</variablelist>
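
<para>As an illustrative sketch only (the package, its source and its
attribute values are all hypothetical), several of the parameters above can
be combined like this:

<programlisting>
foolib = buildPythonPackage rec {
  name = "foolib-1.0";
  src = pkgs.fetchurl { ... };

  # hypothetical: the library only supports Python 2, so skip Python 3
  # interpreters (isPy3k is in scope inside python-packages.nix)
  disabled = isPy3k;

  # available while building and running inside the Nix sandbox, but not
  # propagated into the user environment
  pythonPath = [ self.six ];

  # hypothetical: the test suite needs network access, so it is disabled
  doCheck = false;
};
</programlisting>
</para>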
|
||||
|
||||
</section>
|
||||
|
||||
<section xml:id="python-build-env"><title><function>python.buildEnv</function> function</title>
|
||||
<para>
|
||||
Create Python environments using the low-level <function>pkgs.buildEnv</function> function. Example <filename>default.nix</filename>:
|
||||
|
||||
<programlisting language="nix">
|
||||
<![CDATA[
|
||||
with import <nixpkgs> {};
|
||||
|
||||
python.buildEnv.override {
|
||||
extraLibs = [ pkgs.pythonPackages.pyramid ];
|
||||
ignoreCollisions = true;
|
||||
}
|
||||
]]>
|
||||
</programlisting>
|
||||
|
||||
Running <command>nix-build</command> will create
|
||||
<filename>/nix/store/cf1xhjwzmdki7fasgr4kz6di72ykicl5-python-2.7.8-env</filename>
|
||||
with wrapped binaries in <filename>bin/</filename>.
|
||||
</para>
|
||||
|
||||
<variablelist>
|
||||
<title>
|
||||
<function>python.buildEnv</function> arguments
|
||||
</title>
|
||||
|
||||
<varlistentry>
|
||||
<term><varname>extraLibs</varname></term>
|
||||
<listitem><para>
|
||||
List of packages installed inside the environment.
|
||||
</para></listitem>
|
||||
</varlistentry>
|
||||
|
||||
<varlistentry>
|
||||
<term><varname>postBuild</varname></term>
|
||||
<listitem><para>
|
||||
Shell command executed after the build of environment.
|
||||
</para></listitem>
|
||||
</varlistentry>
|
||||
|
||||
<varlistentry>
|
||||
<term><varname>ignoreCollisions</varname></term>
|
||||
<listitem><para>
|
||||
Ignore file collisions inside the environment (default is <varname>false</varname>).
|
||||
</para></listitem>
|
||||
</varlistentry>
|
||||
</variablelist>
|
||||
</section>
|
||||
|
||||
<section xml:id="python-tools"><title>Tools</title>
|
||||
|
||||
<para>Packages inside nixpkgs are written by hand. However, many tools
exist in the community to help save time. No tool is preferred at the moment.
|
||||
</para>
|
||||
|
||||
<itemizedlist>
|
||||
|
||||
<listitem><para>
|
||||
<link xlink:href="https://github.com/proger/python2nix">python2nix</link>
|
||||
by Vladimir Kirillov
|
||||
</para></listitem>
|
||||
|
||||
<listitem><para>
|
||||
<link xlink:href="https://github.com/garbas/pypi2nix">pypi2nix</link>
|
||||
by Rok Garbas
|
||||
</para></listitem>
|
||||
|
||||
<listitem><para>
|
||||
<link xlink:href="https://github.com/offlinehacker/pypi2nix">pypi2nix</link>
|
||||
by Jaka Hudoklin
|
||||
</para></listitem>
|
||||
|
||||
</itemizedlist>
|
||||
|
||||
</section>
|
||||
|
||||
<section xml:id="python-development"><title>Development</title>
|
||||
|
||||
<para>
|
||||
To develop Python packages, <function>buildPythonPackage</function> has
|
||||
additional logic inside <varname>shellPhase</varname> to run
|
||||
<command>${python.interpreter} setup.py develop</command> for the package.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
Given a <filename>default.nix</filename>:
|
||||
|
||||
<programlisting language="nix">
|
||||
<![CDATA[
|
||||
with import <nixpkgs> {};
|
||||
|
||||
buildPythonPackage {
|
||||
name = "myproject";
|
||||
|
||||
buildInputs = with pkgs.pythonPackages; [ pyramid ];
|
||||
|
||||
src = ./.;
|
||||
}
|
||||
]]>
|
||||
</programlisting>
|
||||
|
||||
Running <command>nix-shell</command> with no arguments should give you
|
||||
the environment in which the package would be built with
|
||||
<command>nix-build</command>.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
A shortcut to set up environments with C headers/libraries and Python packages:
|
||||
|
||||
<programlisting language="bash">$ nix-shell -p pythonPackages.pyramid zlib libjpeg git</programlisting>
|
||||
</para>
|
||||
|
||||
<note><para>
|
||||
There is a boolean value <varname>lib.inNixShell</varname> set to
<varname>true</varname> if nix-shell is invoked (see the sketch below for one way to use it).
|
||||
</para></note>
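
<para>As a small, hypothetical sketch, this can be used to add extra tools
to the environment only when it is entered via <command>nix-shell</command>:

<programlisting language="nix">
<![CDATA[
with import <nixpkgs> {};

buildPythonPackage {
  name = "myproject";

  src = ./.;

  # ipython is only wanted for interactive development, not for nix-build
  buildInputs = with pkgs.pythonPackages;
    [ pyramid ] ++ pkgs.lib.optional pkgs.lib.inNixShell ipython;
}
]]>
</programlisting>
</para>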
|
||||
|
||||
</section>
|
||||
|
||||
<section xml:id="python-faq"><title>FAQ</title>
|
||||
|
||||
<variablelist>
|
||||
|
||||
<varlistentry>
|
||||
<term>How to solve circular dependencies?</term>
|
||||
<listitem><para>
|
||||
If you have packages <varname>A</varname> and <varname>B</varname> that
|
||||
depend on each other, when packaging <varname>B</varname> override package
|
||||
<varname>A</varname> not to depend on <varname>B</varname> as input
|
||||
(and also the other way around).
|
||||
</para></listitem>
|
||||
</varlistentry>
|
||||
|
||||
<varlistentry>
|
||||
<term><varname>install_data / data_files</varname> problems resulting into <literal>error: could not create '/nix/store/6l1bvljpy8gazlsw2aw9skwwp4pmvyxw-python-2.7.8/etc': Permission denied</literal></term>
|
||||
<listitem><para>
|
||||
<link xlink:href="https://bitbucket.org/pypa/setuptools/issue/130/install_data-doesnt-respect-prefix">
|
||||
Known bug in setuptools: <varname>install_data</varname> does not respect --prefix</link>. An example of
such a package using the feature is <filename>pkgs/tools/X11/xpra/default.nix</filename>. As a workaround,
install it as an extra <varname>preInstall</varname> step:
|
||||
|
||||
<programlisting>${python.interpreter} setup.py install_data --install-dir=$out --root=$out
|
||||
sed -i '/ = data_files/d' setup.py</programlisting>
|
||||
</para></listitem>
|
||||
</varlistentry>
|
||||
|
||||
<varlistentry>
|
||||
<term>Rationale of non-existent global site-packages</term>
|
||||
<listitem><para>
|
||||
There is no need to have a global site-packages in Nix. Each package has an isolated
dependency tree, and installing any Python package will only populate <varname>$PATH</varname>
inside the user environment. See <xref linkend="python-build-env" /> to create a self-contained
interpreter with a set of packages.
|
||||
</para></listitem>
|
||||
</varlistentry>
|
||||
|
||||
</variablelist>
|
||||
|
||||
</section>
|
||||
|
||||
|
||||
<section xml:id="python-contrib"><title>Contributing guidelines</title>
|
||||
<para>
|
||||
The following rules should be respected:
|
||||
</para>
|
||||
|
||||
<itemizedlist>
|
||||
|
||||
<listitem><para>
|
||||
Make sure the package builds for all Python interpreters. Use the <varname>disabled</varname> argument to
<function>buildPythonPackage</function> to mark unsupported interpreters.
|
||||
</para></listitem>
|
||||
|
||||
<listitem><para>
|
||||
If tests need to be disabled for a package, make sure you leave a comment explaining why.
|
||||
</para></listitem>
|
||||
|
||||
<listitem><para>
|
||||
Packages in <link xlink:href="https://github.com/NixOS/nixpkgs/blob/master/pkgs/top-level/python-packages.nix"><filename>pkgs/top-level/python-packages.nix</filename></link>
|
||||
are sorted quasi-alphabetically to avoid merge conflicts.
|
||||
</para></listitem>
|
||||
|
||||
</itemizedlist>
|
||||
|
||||
</section>
|
||||
|
||||
</section>
|
||||
|
||||
|
||||
<section xml:id="ssec-language-ruby"><title>Ruby</title>
|
||||
<para>For example, to package the yajl-ruby gem, use gem-nix:</para>
|
||||
|
||||
<screen>
|
||||
$ nix-env -i gem-nix
|
||||
$ gem-nix --no-user-install --nix-file=pkgs/development/interpreters/ruby/generated.nix yajl-ruby
|
||||
$ nix-build -A rubyLibs.yajl-ruby
|
||||
</screen>
|
||||
</section>
|
||||
|
||||
|
||||
<section xml:id="ssec-language-go"><title>Go</title>
|
||||
<para>To extract dependency information from a Go package in an automated way, use <link xlink:href="https://github.com/cstrahan/go2nix">go2nix</link>.</para>
|
||||
</section>
|
||||
|
||||
|
||||
<section xml:id="ssec-language-java"><title>Java</title>
|
||||
|
||||
<para>Ant-based Java packages are typically built from source as follows:
|
||||
|
||||
<programlisting>
|
||||
stdenv.mkDerivation {
|
||||
name = "...";
|
||||
src = fetchurl { ... };
|
||||
|
||||
buildInputs = [ jdk ant ];
|
||||
|
||||
buildPhase = "ant";
|
||||
}
|
||||
</programlisting>
|
||||
|
||||
Note that <varname>jdk</varname> is an alias for the OpenJDK.</para>
|
||||
|
||||
<para>JAR files that are intended to be used by other packages should
|
||||
be installed in <filename>$out/share/java</filename>. The OpenJDK has
|
||||
a stdenv setup hook that adds any JARs in the
|
||||
<filename>share/java</filename> directories of the build inputs to the
|
||||
<envar>CLASSPATH</envar> environment variable. For instance, if the
|
||||
package <literal>libfoo</literal> installs a JAR named
|
||||
<filename>foo.jar</filename> in its <filename>share/java</filename>
|
||||
directory, and another package declares the attribute
|
||||
|
||||
<programlisting>
|
||||
buildInputs = [ jdk libfoo ];
|
||||
</programlisting>
|
||||
|
||||
then <envar>CLASSPATH</envar> will be set to
|
||||
<filename>/nix/store/...-libfoo/share/java/foo.jar</filename>.</para>
|
||||
|
||||
<para>Private JARs
|
||||
should be installed in a location like
|
||||
<filename>$out/share/<replaceable>package-name</replaceable></filename>.</para>
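
<para>As a minimal sketch (the file names are hypothetical), the install
phase of such a package might simply be:

<programlisting>
installPhase = ''
  mkdir -p $out/share/foo
  cp foo.jar $out/share/foo/
'';
</programlisting>
</para>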
|
||||
|
||||
<para>If your Java package provides a program, you need to generate a
|
||||
wrapper script to run it using the OpenJRE. You can use
|
||||
<literal>makeWrapper</literal> for this:
|
||||
|
||||
<programlisting>
|
||||
buildInputs = [ makeWrapper ];
|
||||
|
||||
installPhase =
|
||||
''
|
||||
mkdir -p $out/bin
|
||||
makeWrapper ${jre}/bin/java $out/bin/foo \
|
||||
--add-flags "-cp $out/share/java/foo.jar org.foo.Main"
|
||||
'';
|
||||
</programlisting>
|
||||
|
||||
Note the use of <literal>jre</literal>, which is the part of the
|
||||
OpenJDK package that contains the Java Runtime Environment. By using
|
||||
<literal>${jre}/bin/java</literal> instead of
|
||||
<literal>${jdk}/bin/java</literal>, you prevent your package from
|
||||
depending on the JDK at runtime.</para>
|
||||
|
||||
<para>It is possible to use a different Java compiler than
|
||||
<command>javac</command> from the OpenJDK. For instance, to use the
|
||||
Eclipse Java Compiler:
|
||||
|
||||
<programlisting>
|
||||
buildInputs = [ jre ant ecj ];
|
||||
</programlisting>
|
||||
|
||||
(Note that here you don’t need the full JDK as an input, but just the
|
||||
JRE.) The ECJ has a stdenv setup hook that sets some environment
|
||||
variables to cause Ant to use ECJ, but this doesn’t work with all Ant
|
||||
files. Similarly, you can use the GNU Java Compiler:
|
||||
|
||||
<programlisting>
|
||||
buildInputs = [ gcj ant ];
|
||||
</programlisting>
|
||||
|
||||
Here, Ant will automatically use <command>gij</command> (the GNU Java
|
||||
Runtime) instead of the OpenJRE.</para>
|
||||
|
||||
</section>
|
||||
|
||||
|
||||
<section xml:id="ssec-language-lua"><title>Lua</title>
|
||||
|
||||
<para>
|
||||
Lua packages are built by the <varname>buildLuaPackage</varname> function. This function is
|
||||
implemented
|
||||
in <link xlink:href="https://github.com/NixOS/nixpkgs/blob/master/pkgs/development/lua-modules/generic/default.nix">
|
||||
<filename>pkgs/development/lua-modules/generic/default.nix</filename></link>
|
||||
and works similarly to <varname>buildPerlPackage</varname>. (See
|
||||
<xref linkend="ssec-language-perl"/> for details.)
|
||||
</para>
|
||||
|
||||
<para>
|
||||
Lua packages are defined
|
||||
in <link xlink:href="https://github.com/NixOS/nixpkgs/blob/master/pkgs/top-level/lua-packages.nix"><filename>pkgs/top-level/lua-packages.nix</filename></link>.
|
||||
Most of them are simple. For example:
|
||||
|
||||
<programlisting>
|
||||
fileSystem = buildLuaPackage {
|
||||
name = "filesystem-1.6.2";
|
||||
src = fetchurl {
|
||||
url = "https://github.com/keplerproject/luafilesystem/archive/v1_6_2.tar.gz";
|
||||
sha256 = "1n8qdwa20ypbrny99vhkmx8q04zd2jjycdb5196xdhgvqzk10abz";
|
||||
};
|
||||
meta = {
|
||||
homepage = "https://github.com/keplerproject/luafilesystem";
|
||||
hydraPlatforms = stdenv.lib.platforms.linux;
|
||||
maintainers = with maintainers; [ flosse ];
|
||||
};
|
||||
};
|
||||
</programlisting>
|
||||
</para>
|
||||
|
||||
<para>
|
||||
More complicated packages, though, should be placed in a separate file in
|
||||
<link
|
||||
xlink:href="https://github.com/NixOS/nixpkgs/blob/master/pkgs/development/lua-modules"><filename>pkgs/development/lua-modules</filename></link>.
|
||||
</para>
|
||||
<para>
|
||||
Lua packages accept an additional parameter <varname>disabled</varname>, which defines
the condition for excluding a package from luaPackages. For example, if a package has
<varname>disabled</varname> set to <literal>lua.luaversion != "5.1"</literal>,
it will not be included in any luaPackages except lua51Packages, so it
will only be built for Lua 5.1 (see the sketch below).
|
||||
</para>
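<para>As a minimal, hypothetical sketch, such a package could look like
this:

<programlisting>
fooLib = buildLuaPackage {
  name = "foolib-0.1";
  src = fetchurl { ... };
  # hypothetical: this library only works with Lua 5.1
  disabled = lua.luaversion != "5.1";
};
</programlisting>
</para>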
|
||||
|
||||
</section>
|
||||
|
||||
<section xml:id="ssec-language-coq"><title>Coq</title>
|
||||
<para>
|
||||
Coq libraries should be installed in
|
||||
<literal>$(out)/lib/coq/${coq.coq-version}/user-contrib/</literal>.
|
||||
Such directories are automatically added to the
|
||||
<literal>$COQPATH</literal> environment variable by the hook defined
|
||||
in the Coq derivation.
|
||||
</para>
|
||||
<para>
|
||||
Some libraries require OCaml and sometimes also Camlp5. The exact
|
||||
versions that were used to build Coq are saved in the
|
||||
<literal>coq.ocaml</literal> and <literal>coq.camlp5</literal>
|
||||
attributes.
|
||||
</para>
|
||||
<para>
|
||||
Here is a simple package example. It is a pure Coq library, thus it
|
||||
only depends on Coq. Its <literal>makefile</literal> has been
|
||||
generated using <literal>coq_makefile</literal> so we only have to
|
||||
set the <literal>$COQLIB</literal> variable at install time.
|
||||
</para>
|
||||
<programlisting>
|
||||
{stdenv, fetchurl, coq}:
|
||||
stdenv.mkDerivation {
|
||||
src = fetchurl {
|
||||
url = http://coq.inria.fr/pylons/contribs/files/Karatsuba/v8.4/Karatsuba.tar.gz;
|
||||
sha256 = "0ymfpv4v49k4fm63nq6gcl1hbnnxrvjjp7yzc4973n49b853c5b1";
|
||||
};
|
||||
|
||||
name = "coq-karatsuba";
|
||||
|
||||
buildInputs = [ coq ];
|
||||
|
||||
installFlags = "COQLIB=$(out)/lib/coq/${coq.coq-version}/";
|
||||
}
|
||||
</programlisting>
|
||||
</section>
|
||||
|
||||
<!--
|
||||
<section><title>Haskell</title>
|
||||
|
||||
<para>TODO</para>
|
||||
|
||||
</section>
|
||||
|
||||
|
||||
<section><title>TeX / LaTeX</title>
|
||||
|
||||
<para>* Special support for building TeX documents</para>
|
||||
|
||||
</section>
|
||||
-->
|
||||
|
||||
|
||||
</chapter>
|
||||
@@ -1,376 +0,0 @@
|
||||
<section xmlns="http://docbook.org/ns/docbook"
|
||||
xmlns:xlink="http://www.w3.org/1999/xlink"
|
||||
xml:id="sec-beam">
|
||||
|
||||
<title>Beam Languages (Erlang & Elixir)</title>
|
||||
<section xml:id="beam-introduction">
|
||||
<title>Introduction</title>
|
||||
<para>
|
||||
In this document and related Nix expressions we use the term
|
||||
<emphasis>Beam</emphasis> to describe the environment. Beam is
|
||||
the name of the Erlang Virtual Machine and, as far as we know,
from a packaging perspective all languages that run on Beam are
interchangeable. The things that do change, like the build
system, are transparent to the users of the package. So we make
|
||||
no distinction.
|
||||
</para>
|
||||
</section>
|
||||
<section xml:id="build-tools">
|
||||
<title>Build Tools</title>
|
||||
<section xml:id="build-tools-rebar3">
|
||||
<title>Rebar3</title>
|
||||
<para>
|
||||
By default Rebar3 wants to manage its own dependencies. In the
normal, non-Nix world this is perfectly acceptable. In the Nix world it
is not. To support this we have created two versions of rebar3,
<literal>rebar3</literal> and <literal>rebar3-open</literal>. The
<literal>rebar3</literal> version has been patched to remove the
ability to download anything. If you are not running it in a
nix-shell or a nix-build then it's probably not going to work for
you. <literal>rebar3-open</literal> is the normal, unmodified
rebar3. It should work exactly as would any other version of
rebar3. Any Erlang package should rely on
<literal>rebar3</literal>, and that's really what you should be
using too.
|
||||
</para>
|
||||
</section>
|
||||
<section xml:id="build-tools-other">
|
||||
<title>Mix & Erlang.mk</title>
|
||||
<para>
|
||||
Both Mix and Erlang.mk work exactly as you would expect. There
|
||||
is a bootstrap process that needs to be run for both of
|
||||
them. However, that is supported by the
|
||||
<literal>buildMix</literal> and <literal>buildErlangMk</literal> derivations.
|
||||
</para>
|
||||
</section>
|
||||
|
||||
</section>
|
||||
|
||||
<section xml:id="how-to-install-beam-packages">
|
||||
<title>How to install Beam packages</title>
|
||||
<para>
|
||||
Beam packages are not registered in the top level simply because
|
||||
they are not relevant to the vast majority of Nix users. They are
|
||||
installable using the <literal>beamPackages</literal> attribute
|
||||
set.
|
||||
|
||||
You can list the available packages in the
|
||||
<literal>beamPackages</literal> with the following command:
|
||||
</para>
|
||||
|
||||
<programlisting>
|
||||
$ nix-env -f "<nixpkgs>" -qaP -A beamPackages
|
||||
beamPackages.esqlite esqlite-0.2.1
|
||||
beamPackages.goldrush goldrush-0.1.7
|
||||
beamPackages.ibrowse ibrowse-4.2.2
|
||||
beamPackages.jiffy jiffy-0.14.5
|
||||
beamPackages.lager lager-3.0.2
|
||||
beamPackages.meck meck-0.8.3
|
||||
beamPackages.rebar3-pc pc-1.1.0
|
||||
</programlisting>
|
||||
<para>
|
||||
To install any of those packages into your profile, refer to them by
|
||||
their attribute path (first column):
|
||||
</para>
|
||||
<programlisting>
|
||||
$ nix-env -f "<nixpkgs>" -iA beamPackages.ibrowse
|
||||
</programlisting>
|
||||
<para>
|
||||
The attribute path of any Beam package corresponds to the name
|
||||
of that particular package in Hex or its OTP Application/Release name.
|
||||
</para>
|
||||
</section>
|
||||
<section xml:id="packaging-beam-applications">
|
||||
<title>Packaging Beam Applications</title>
|
||||
<section xml:id="packaging-erlang-applications">
|
||||
<title>Erlang Applications</title>
|
||||
<section xml:id="rebar3-packages">
|
||||
<title>Rebar3 Packages</title>
|
||||
<para>
|
||||
There is a Nix function called
<literal>buildRebar3</literal>. We use this function to make a
derivation that understands how to build a rebar3 project. For
example, the expression we use to build the <link
|
||||
xlink:href="https://github.com/erlang-nix/hex2nix">hex2nix</link>
|
||||
project follows.
|
||||
</para>
|
||||
<programlisting>
|
||||
{stdenv, fetchFromGitHub, buildRebar3, ibrowse, jsx, erlware_commons }:
|
||||
|
||||
buildRebar3 rec {
|
||||
name = "hex2nix";
|
||||
version = "0.0.1";
|
||||
|
||||
src = fetchFromGitHub {
|
||||
owner = "ericbmerritt";
|
||||
repo = "hex2nix";
|
||||
rev = "${version}";
|
||||
sha256 = "1w7xjidz1l5yjmhlplfx7kphmnpvqm67w99hd2m7kdixwdxq0zqg";
|
||||
};
|
||||
|
||||
beamDeps = [ ibrowse jsx erlware_commons ];
|
||||
}
|
||||
</programlisting>
|
||||
<para>
|
||||
The only visible difference between this derivation and
|
||||
something like <literal>stdenv.mkDerivation</literal> is that we
|
||||
have added <literal>beamDeps</literal> to the derivation. If
you add your Beam dependencies here, they will be correctly
|
||||
handled by the system.
|
||||
</para>
|
||||
<para>
|
||||
If your package needs to compile native code via Rebar's port
|
||||
compilation mechanism, you should add <literal>compilePort =
true;</literal> to the derivation, as in the sketch below.
|
||||
</para>
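<para>As a hypothetical sketch, a package with a port-compiled native
dependency would only differ from the example above by that extra
attribute:

<programlisting>
buildRebar3 rec {
  name = "frobnicator";
  version = "0.1.0";

  src = fetchFromGitHub { ... };

  beamDeps = [ ibrowse ];
  # compile the native code via rebar's port compiler
  compilePort = true;
}
</programlisting>
</para>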
|
||||
</section>
|
||||
<section xml:id="erlang-mk-packages">
|
||||
<title>Erlang.mk Packages</title>
|
||||
<para>
|
||||
Erlang.mk functions almost identically to Rebar. The only real
|
||||
difference is that <literal>buildErlangMk</literal> is called
|
||||
instead of <literal>buildRebar3</literal>.
|
||||
</para>
|
||||
<programlisting>
|
||||
{ buildErlangMk, fetchHex, cowlib, ranch }:
|
||||
buildErlangMk {
|
||||
name = "cowboy";
|
||||
version = "1.0.4";
|
||||
src = fetchHex {
|
||||
pkg = "cowboy";
|
||||
version = "1.0.4";
|
||||
sha256 =
|
||||
"6a0edee96885fae3a8dd0ac1f333538a42e807db638a9453064ccfdaa6b9fdac";
|
||||
};
|
||||
beamDeps = [ cowlib ranch ];
|
||||
|
||||
meta = {
|
||||
description = ''Small, fast, modular HTTP server written in
|
||||
Erlang.'';
|
||||
license = stdenv.lib.licenses.isc;
|
||||
homepage = "https://github.com/ninenines/cowboy";
|
||||
};
|
||||
}
|
||||
</programlisting>
|
||||
</section>
|
||||
<section xml:id="mix-packages">
|
||||
<title>Mix Packages</title>
|
||||
<para>
|
||||
Mix functions almost identically to Rebar. The only real
|
||||
difference is that <literal>buildMix</literal> is called
|
||||
instead of <literal>buildRebar3</literal>.
|
||||
</para>
|
||||
<programlisting>
|
||||
{ buildMix, fetchHex, plug, absinthe }:
|
||||
buildMix {
|
||||
name = "absinthe_plug";
|
||||
version = "1.0.0";
|
||||
src = fetchHex {
|
||||
pkg = "absinthe_plug";
|
||||
version = "1.0.0";
|
||||
sha256 =
|
||||
"08459823fe1fd4f0325a8bf0c937a4520583a5a26d73b193040ab30a1dfc0b33";
|
||||
};
|
||||
beamDeps = [ plug absinthe ];
|
||||
|
||||
meta = {
|
||||
description = ''A plug for Absinthe, an experimental GraphQL
|
||||
toolkit'';
|
||||
license = stdenv.lib.licenses.bsd3;
|
||||
homepage = "https://github.com/CargoSense/absinthe_plug";
|
||||
};
|
||||
}
|
||||
</programlisting>
|
||||
</section>
|
||||
</section>
|
||||
</section>
|
||||
<section xml:id="how-to-develop">
|
||||
<title>How to develop</title>
|
||||
<section xml:id="accessing-an-environment">
|
||||
<title>Accessing an Environment</title>
|
||||
<para>
|
||||
Often, all you want to do is be able to access a valid
|
||||
environment that contains a specific package and its
|
||||
dependencies. We can do that with the <literal>env</literal>
part of a derivation. For example, let's say we want to access an
Erlang REPL with ibrowse loaded up. We could do the following.
|
||||
</para>
|
||||
<programlisting>
|
||||
~/w/nixpkgs ❯❯❯ nix-shell -A beamPackages.ibrowse.env --run "erl"
|
||||
Erlang/OTP 18 [erts-7.0] [source] [64-bit] [smp:4:4] [async-threads:10] [hipe] [kernel-poll:false]
|
||||
|
||||
Eshell V7.0 (abort with ^G)
|
||||
1> m(ibrowse).
|
||||
Module: ibrowse
|
||||
MD5: 3b3e0137d0cbb28070146978a3392945
|
||||
Compiled: January 10 2016, 23:34
|
||||
Object file: /nix/store/g1rlf65rdgjs4abbyj4grp37ry7ywivj-ibrowse-4.2.2/lib/erlang/lib/ibrowse-4.2.2/ebin/ibrowse.beam
|
||||
Compiler options: [{outdir,"/tmp/nix-build-ibrowse-4.2.2.drv-0/hex-source-ibrowse-4.2.2/_build/default/lib/ibrowse/ebin"},
|
||||
debug_info,debug_info,nowarn_shadow_vars,
|
||||
warn_unused_import,warn_unused_vars,warnings_as_errors,
|
||||
{i,"/tmp/nix-build-ibrowse-4.2.2.drv-0/hex-source-ibrowse-4.2.2/_build/default/lib/ibrowse/include"}]
|
||||
Exports:
|
||||
add_config/1 send_req_direct/7
|
||||
all_trace_off/0 set_dest/3
|
||||
code_change/3 set_max_attempts/3
|
||||
get_config_value/1 set_max_pipeline_size/3
|
||||
get_config_value/2 set_max_sessions/3
|
||||
get_metrics/0 show_dest_status/0
|
||||
get_metrics/2 show_dest_status/1
|
||||
handle_call/3 show_dest_status/2
|
||||
handle_cast/2 spawn_link_worker_process/1
|
||||
handle_info/2 spawn_link_worker_process/2
|
||||
init/1 spawn_worker_process/1
|
||||
module_info/0 spawn_worker_process/2
|
||||
module_info/1 start/0
|
||||
rescan_config/0 start_link/0
|
||||
rescan_config/1 stop/0
|
||||
send_req/3 stop_worker_process/1
|
||||
send_req/4 stream_close/1
|
||||
send_req/5 stream_next/1
|
||||
send_req/6 terminate/2
|
||||
send_req_direct/4 trace_off/0
|
||||
send_req_direct/5 trace_off/2
|
||||
send_req_direct/6 trace_on/0
|
||||
trace_on/2
|
||||
ok
|
||||
2>
|
||||
</programlisting>
|
||||
<para>
|
||||
Notice the <literal>-A beamPackages.ibrowse.env</literal>. That
|
||||
is the key to this functionality.
|
||||
</para>
|
||||
</section>
|
||||
<section xml:id="creating-a-shell">
|
||||
<title>Creating a Shell</title>
|
||||
<para>
|
||||
Getting access to an environment often isn't enough to do real
|
||||
development. Many times we need to create a
|
||||
<literal>shell.nix</literal> file and do our development inside
|
||||
of the environment specified by that file. This file looks a lot
|
||||
like the packaging described above. The main difference is that
|
||||
<literal>src</literal> points to the project root and we call the
|
||||
package directly.
|
||||
</para>
|
||||
<programlisting>
|
||||
{ pkgs ? import <nixpkgs> {} }:
|
||||
|
||||
with pkgs;
|
||||
|
||||
let
|
||||
|
||||
f = { buildRebar3, ibrowse, jsx, erlware_commons }:
|
||||
buildRebar3 {
|
||||
name = "hex2nix";
|
||||
version = "0.1.0";
|
||||
src = ./.;
|
||||
beamDeps = [ ibrowse jsx erlware_commons ];
|
||||
};
|
||||
drv = beamPackages.callPackage f {};
|
||||
|
||||
in
|
||||
drv
|
||||
</programlisting>
|
||||
<section xml:id="building-in-a-shell">
|
||||
<title>Building in a shell</title>
|
||||
<para>
|
||||
We can leverage the support of the derivation, regardless of
which build derivation is called, by calling the commands themselves.
|
||||
</para>
|
||||
<programlisting>
|
||||
# =============================================================================
|
||||
# Variables
|
||||
# =============================================================================
|
||||
|
||||
NIX_TEMPLATES := "$(CURDIR)/nix-templates"
|
||||
|
||||
TARGET := "$(PREFIX)"
|
||||
|
||||
PROJECT_NAME := thorndyke
|
||||
|
||||
NIXPKGS=../nixpkgs
|
||||
NIX_PATH=nixpkgs=$(NIXPKGS)
|
||||
NIX_SHELL=nix-shell -I "$(NIX_PATH)" --pure
|
||||
# =============================================================================
|
||||
# Rules
|
||||
# =============================================================================
|
||||
.PHONY= all test clean repl shell build test analyze configure install \
|
||||
test-nix-install publish plt analyze
|
||||
|
||||
all: build
|
||||
|
||||
guard-%:
|
||||
@ if [ "${${*}}" == "" ]; then \
|
||||
echo "Environment variable $* not set"; \
|
||||
exit 1; \
|
||||
fi
|
||||
|
||||
clean:
|
||||
rm -rf _build
|
||||
rm -rf .cache
|
||||
|
||||
repl:
|
||||
$(NIX_SHELL) --run "iex -pa './_build/prod/lib/*/ebin'"
|
||||
|
||||
shell:
|
||||
$(NIX_SHELL)
|
||||
|
||||
configure:
|
||||
$(NIX_SHELL) --command 'eval "$$configurePhase"'
|
||||
|
||||
build: configure
|
||||
$(NIX_SHELL) --command 'eval "$$buildPhase"'
|
||||
|
||||
install:
|
||||
$(NIX_SHELL) --command 'eval "$$installPhase"'
|
||||
|
||||
test:
|
||||
$(NIX_SHELL) --command 'mix test --no-start --no-deps-check'
|
||||
|
||||
plt:
|
||||
$(NIX_SHELL) --run "mix dialyzer.plt --no-deps-check"
|
||||
|
||||
analyze: build plt
|
||||
$(NIX_SHELL) --run "mix dialyzer --no-compile"
|
||||
|
||||
</programlisting>
|
||||
<para>
|
||||
If you add the <literal>shell.nix</literal> as described and
|
||||
use rebar via the Makefile above, things should simply work. Aside from
<literal>test</literal>, <literal>plt</literal>, and
<literal>analyze</literal>, the tasks work just fine for all of
the build derivations.
|
||||
</para>
|
||||
</section>
|
||||
</section>
|
||||
</section>
|
||||
<section xml:id="generating-packages-from-hex-with-hex2nix">
|
||||
<title>Generating Packages from Hex with Hex2Nix</title>
|
||||
<para>
|
||||
Updating the Hex packages requires the use of the
|
||||
<literal>hex2nix</literal> tool. Given the path to the Erlang
|
||||
modules (usually
|
||||
<literal>pkgs/development/erlang-modules</literal>), it will
|
||||
happily dump a file called
|
||||
<literal>hex-packages.nix</literal>. That file will contain all
|
||||
the packages that use a recognized build system in Hex. However,
|
||||
it can't know whether or not all those packages are buildable.
|
||||
</para>
|
||||
<para>
|
||||
To make life easier for our users, it makes good sense to go
|
||||
ahead and attempt to build all those packages and remove the
|
||||
ones that don't build. To do that, simply run the following command in
the root of your <literal>nixpkgs</literal> repository.
|
||||
</para>
|
||||
<programlisting>
|
||||
$ nix-build -A beamPackages
|
||||
</programlisting>
|
||||
<para>
|
||||
That will build every package in
|
||||
<literal>beamPackages</literal>. Then you can go through and
|
||||
manually remove the ones that fail. Hopefully, someone will
|
||||
improve <literal>hex2nix</literal> in the future to automate
|
||||
that.
|
||||
</para>
|
||||
</section>
|
||||
</section>
|
||||
@@ -1,244 +0,0 @@
|
||||
<section xmlns="http://docbook.org/ns/docbook"
|
||||
xmlns:xlink="http://www.w3.org/1999/xlink"
|
||||
xml:id="sec-bower">
|
||||
|
||||
<title>Bower</title>
|
||||
|
||||
<para>
|
||||
<link xlink:href="http://bower.io">Bower</link> is a package manager
|
||||
for web site front-end components. Bower packages (comprising
|
||||
build artefacts and sometimes sources) are stored in
|
||||
<command>git</command> repositories, typically on GitHub. The
|
||||
package registry is run by the Bower team with package metadata
|
||||
coming from the <filename>bower.json</filename> file within each
|
||||
package.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
The end result of running Bower is a
|
||||
<filename>bower_components</filename> directory which can be included
|
||||
in the web app's build process.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
Bower can be run interactively by installing
|
||||
<varname>nodePackages.bower</varname>. More interestingly, the Bower
|
||||
components can be declared in a Nix derivation, with the help of
|
||||
<varname>nodePackages.bower2nix</varname>.
|
||||
</para>
|
||||
|
||||
<section xml:id="ssec-bower2nix-usage">
|
||||
<title><command>bower2nix</command> usage</title>
|
||||
|
||||
<para>
|
||||
Suppose you have a <filename>bower.json</filename> with the following contents:
|
||||
|
||||
|
||||
<example xml:id="ex-bowerJson"><title><filename>bower.json</filename></title>
|
||||
<programlisting language="json">
|
||||
<![CDATA[{
|
||||
"name": "my-web-app",
|
||||
"dependencies": {
|
||||
"angular": "~1.5.0",
|
||||
"bootstrap": "~3.3.6"
|
||||
}
|
||||
}]]>
|
||||
</programlisting>
|
||||
</example>
|
||||
</para>
|
||||
|
||||
|
||||
<para>
|
||||
Running <command>bower2nix</command> will produce something like the
|
||||
following output:
|
||||
|
||||
<programlisting language="nix">
|
||||
<![CDATA[{ fetchbower, buildEnv }:
|
||||
buildEnv { name = "bower-env"; ignoreCollisions = true; paths = [
|
||||
(fetchbower "angular" "1.5.3" "~1.5.0" "1749xb0firxdra4rzadm4q9x90v6pzkbd7xmcyjk6qfza09ykk9y")
|
||||
(fetchbower "bootstrap" "3.3.6" "~3.3.6" "1vvqlpbfcy0k5pncfjaiskj3y6scwifxygfqnw393sjfxiviwmbv")
|
||||
(fetchbower "jquery" "2.2.2" "1.9.1 - 2" "10sp5h98sqwk90y4k6hbdviwqzvzwqf47r3r51pakch5ii2y7js1")
|
||||
]; }]]>
|
||||
</programlisting>
|
||||
</para>
|
||||
|
||||
|
||||
<para>
|
||||
Using the <command>bower2nix</command> command line arguments, the
|
||||
output can be redirected to a file. A name like
|
||||
<filename>bower-packages.nix</filename> would be fine.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
The resulting derivation is a union of all the downloaded Bower
|
||||
packages (and their dependencies). To use it, they still need to be
|
||||
linked together by Bower, which is where
|
||||
<varname>buildBowerComponents</varname> is useful.
|
||||
</para>
|
||||
</section>
|
||||
|
||||
<section xml:id="ssec-build-bower-components"><title><varname>buildBowerComponents</varname> function</title>
|
||||
|
||||
<para>
|
||||
The function is implemented in <link xlink:href="https://github.com/NixOS/nixpkgs/blob/master/pkgs/development/bower-modules/generic/default.nix">
|
||||
<filename>pkgs/development/bower-modules/generic/default.nix</filename></link>.
|
||||
Example usage:
|
||||
|
||||
<example xml:id="ex-buildBowerComponents"><title>buildBowerComponents</title>
|
||||
<programlisting language="nix">
|
||||
bowerComponents = buildBowerComponents {
|
||||
name = "my-web-app";
|
||||
generated = ./bower-packages.nix; <co xml:id="ex-buildBowerComponents-1" />
|
||||
src = myWebApp; <co xml:id="ex-buildBowerComponents-2" />
|
||||
};
|
||||
</programlisting>
|
||||
</example>
|
||||
</para>
|
||||
|
||||
<para>
|
||||
In <xref linkend="ex-buildBowerComponents" />, the following arguments
|
||||
are of special significance to the function:
|
||||
|
||||
<calloutlist>
|
||||
<callout arearefs="ex-buildBowerComponents-1">
|
||||
<para>
|
||||
<varname>generated</varname> specifies the file which was created by <command>bower2nix</command>.
|
||||
</para>
|
||||
</callout>
|
||||
|
||||
<callout arearefs="ex-buildBowerComponents-2">
|
||||
<para>
|
||||
<varname>src</varname> is your project's sources. It needs to
|
||||
contain a <filename>bower.json</filename> file.
|
||||
</para>
|
||||
</callout>
|
||||
</calloutlist>
|
||||
</para>
|
||||
|
||||
<para>
|
||||
<varname>buildBowerComponents</varname> will run Bower to link
|
||||
together the output of <command>bower2nix</command>, resulting in a
|
||||
<filename>bower_components</filename> directory which can be used.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
Here is an example of a web frontend build process using
|
||||
<command>gulp</command>. You might use <command>grunt</command>, or
|
||||
anything else.
|
||||
</para>
|
||||
|
||||
<example xml:id="ex-bowerGulpFile"><title>Example build script (<filename>gulpfile.js</filename>)</title>
|
||||
<programlisting language="javascript">
|
||||
<![CDATA[var gulp = require('gulp');
|
||||
|
||||
gulp.task('default', [], function () {
|
||||
gulp.start('build');
|
||||
});
|
||||
|
||||
gulp.task('build', [], function () {
|
||||
console.log("Just a dummy gulp build");
|
||||
gulp
|
||||
.src(["./bower_components/**/*"])
|
||||
.pipe(gulp.dest("./gulpdist/"));
|
||||
});]]>
|
||||
</programlisting>
|
||||
</example>
|
||||
|
||||
<example xml:id="ex-buildBowerComponentsDefaultNix">
|
||||
<title>Full example — <filename>default.nix</filename></title>
|
||||
<programlisting language="nix">
|
||||
{ myWebApp ? { outPath = ./.; name = "myWebApp"; }
|
||||
, pkgs ? import <nixpkgs> {}
|
||||
}:
|
||||
|
||||
pkgs.stdenv.mkDerivation {
|
||||
name = "my-web-app-frontend";
|
||||
src = myWebApp;
|
||||
|
||||
buildInputs = [ pkgs.nodePackages.gulp ];
|
||||
|
||||
bowerComponents = pkgs.buildBowerComponents { <co xml:id="ex-buildBowerComponentsDefault-1" />
|
||||
name = "my-web-app";
|
||||
generated = ./bower-packages.nix;
|
||||
src = myWebApp;
|
||||
};
|
||||
|
||||
buildPhase = ''
|
||||
cp --reflink=auto --no-preserve=mode -R $bowerComponents/bower_components . <co xml:id="ex-buildBowerComponentsDefault-2" />
|
||||
export HOME=$PWD <co xml:id="ex-buildBowerComponentsDefault-3" />
|
||||
${pkgs.nodePackages.gulp}/bin/gulp build <co xml:id="ex-buildBowerComponentsDefault-4" />
|
||||
'';
|
||||
|
||||
installPhase = "mv gulpdist $out";
|
||||
}
|
||||
</programlisting>
|
||||
</example>
|
||||
|
||||
<para>
|
||||
A few notes about <xref linkend="ex-buildBowerComponentsDefaultNix" />:
|
||||
|
||||
<calloutlist>
|
||||
<callout arearefs="ex-buildBowerComponentsDefault-1">
|
||||
<para>
|
||||
The result of <varname>buildBowerComponents</varname> is an
|
||||
input to the frontend build.
|
||||
</para>
|
||||
</callout>
|
||||
|
||||
<callout arearefs="ex-buildBowerComponentsDefault-2">
|
||||
<para>
|
||||
Whether to symlink or copy the
|
||||
<filename>bower_components</filename> directory depends on the
|
||||
build tool in use. In this case a copy is used to avoid
|
||||
<command>gulp</command> silliness with permissions.
|
||||
</para>
|
||||
</callout>
|
||||
|
||||
<callout arearefs="ex-buildBowerComponentsDefault-3">
|
||||
<para>
|
||||
<command>gulp</command> requires <varname>HOME</varname> to
|
||||
refer to a writeable directory.
|
||||
</para>
|
||||
</callout>
|
||||
|
||||
<callout arearefs="ex-buildBowerComponentsDefault-4">
|
||||
<para>
|
||||
The actual build command. Other tools could be used.
|
||||
</para>
|
||||
</callout>
|
||||
</calloutlist>
|
||||
</para>
|
||||
</section>
|
||||
|
||||
<section xml:id="ssec-bower2nix-troubleshooting">
|
||||
<title>Troubleshooting</title>
|
||||
|
||||
<variablelist>
|
||||
|
||||
<varlistentry>
|
||||
<term>
|
||||
<literal>ENOCACHE</literal> errors from
|
||||
<varname>buildBowerComponents</varname>
|
||||
</term>
|
||||
<listitem>
|
||||
<para>
|
||||
This means that Bower was looking for a package version which
|
||||
doesn't exist in the generated
|
||||
<filename>bower-packages.nix</filename>.
|
||||
</para>
|
||||
<para>
|
||||
If <filename>bower.json</filename> has been updated, then run
|
||||
<command>bower2nix</command> again.
|
||||
</para>
|
||||
<para>
|
||||
It could also be a bug in <command>bower2nix</command> or
|
||||
<command>fetchbower</command>. If possible, try reformulating
|
||||
the version specification in <filename>bower.json</filename>.
|
||||
</para>
|
||||
</listitem>
|
||||
</varlistentry>
|
||||
</variablelist>
|
||||
|
||||
</section>
|
||||
|
||||
</section>
|
||||
@@ -1,41 +0,0 @@
|
||||
<section xmlns="http://docbook.org/ns/docbook"
|
||||
xmlns:xlink="http://www.w3.org/1999/xlink"
|
||||
xml:id="sec-language-coq">
|
||||
|
||||
<title>Coq</title>
|
||||
<para>
|
||||
Coq libraries should be installed in
|
||||
<literal>$(out)/lib/coq/${coq.coq-version}/user-contrib/</literal>.
|
||||
Such directories are automatically added to the
|
||||
<literal>$COQPATH</literal> environment variable by the hook defined
|
||||
in the Coq derivation.
|
||||
</para>
|
||||
<para>
|
||||
Some libraries require OCaml and sometimes also Camlp5. The exact
|
||||
versions that were used to build Coq are saved in the
|
||||
<literal>coq.ocaml</literal> and <literal>coq.camlp5</literal>
|
||||
attributes.
|
||||
</para>
|
||||
<para>
|
||||
Here is a simple package example. It is a pure Coq library, thus it
|
||||
only depends on Coq. Its <literal>makefile</literal> has been
|
||||
generated using <literal>coq_makefile</literal> so we only have to
|
||||
set the <literal>$COQLIB</literal> variable at install time.
|
||||
</para>
|
||||
<programlisting>
|
||||
{stdenv, fetchurl, coq}:
|
||||
stdenv.mkDerivation {
|
||||
src = fetchurl {
|
||||
url = http://coq.inria.fr/pylons/contribs/files/Karatsuba/v8.4/Karatsuba.tar.gz;
|
||||
sha256 = "0ymfpv4v49k4fm63nq6gcl1hbnnxrvjjp7yzc4973n49b853c5b1";
|
||||
};
|
||||
|
||||
name = "coq-karatsuba";
|
||||
|
||||
buildInputs = [ coq ];
|
||||
|
||||
installFlags = "COQLIB=$(out)/lib/coq/${coq.coq-version}/";
|
||||
}
|
||||
</programlisting>
|
||||
</section>
|
||||
|
||||
@@ -1,166 +0,0 @@
|
||||
<section xmlns="http://docbook.org/ns/docbook"
|
||||
xmlns:xlink="http://www.w3.org/1999/xlink"
|
||||
xml:id="sec-language-go">
|
||||
|
||||
<title>Go</title>
|
||||
|
||||
<para>The function <varname>buildGoPackage</varname> builds
|
||||
standard Go programs.
|
||||
</para>
|
||||
|
||||
<example xml:id='ex-buildGoPackage'><title>buildGoPackage</title>
|
||||
<programlisting>
|
||||
deis = buildGoPackage rec {
|
||||
name = "deis-${version}";
|
||||
version = "1.13.0";
|
||||
|
||||
goPackagePath = "github.com/deis/deis"; <co xml:id='ex-buildGoPackage-1' />
|
||||
subPackages = [ "client" ]; <co xml:id='ex-buildGoPackage-2' />
|
||||
|
||||
src = fetchFromGitHub {
|
||||
owner = "deis";
|
||||
repo = "deis";
|
||||
rev = "v${version}";
|
||||
sha256 = "1qv9lxqx7m18029lj8cw3k7jngvxs4iciwrypdy0gd2nnghc68sw";
|
||||
};
|
||||
|
||||
goDeps = ./deps.nix; <co xml:id='ex-buildGoPackage-3' />
|
||||
|
||||
buildFlags = "--tags release"; <co xml:id='ex-buildGoPackage-4' />
|
||||
}
|
||||
</programlisting>
|
||||
</example>
|
||||
|
||||
<para><xref linkend='ex-buildGoPackage'/> is an example expression using <varname>buildGoPackage</varname>;
|
||||
the following arguments are of special significance to the function:
|
||||
|
||||
<calloutlist>
|
||||
|
||||
<callout arearefs='ex-buildGoPackage-1'>
|
||||
<para>
|
||||
<varname>goPackagePath</varname> specifies the package's canonical Go import path.
|
||||
</para>
|
||||
</callout>
|
||||
|
||||
<callout arearefs='ex-buildGoPackage-2'>
|
||||
<para>
|
||||
<varname>subPackages</varname> restricts the build to the listed child packages. If
<varname>subPackages</varname> is not specified, all child packages will be built.
|
||||
</para>
|
||||
<para>
|
||||
In this example only <literal>github.com/deis/deis/client</literal> will be built.
|
||||
</para>
|
||||
</callout>
|
||||
|
||||
<callout arearefs='ex-buildGoPackage-3'>
|
||||
<para>
|
||||
<varname>goDeps</varname> is where the Go dependencies of a Go program are listed
|
||||
as a list of package sources identified by their Go import paths.
|
||||
It could be imported as a separate <varname>deps.nix</varname> file for
|
||||
readability. The dependency data structure is described below.
|
||||
</para>
|
||||
</callout>
|
||||
|
||||
<callout arearefs='ex-buildGoPackage-4'>
|
||||
<para>
|
||||
<varname>buildFlags</varname> is a list of flags passed to the go build command.
|
||||
</para>
|
||||
</callout>
|
||||
|
||||
</calloutlist>
|
||||
|
||||
</para>
|
||||
|
||||
<para>The <varname>goDeps</varname> attribute can be imported from a separate
|
||||
<varname>nix</varname> file that defines which Go libraries are needed and should
|
||||
be included in <varname>GOPATH</varname> for <varname>buildPhase</varname>.
|
||||
</para>
|
||||
|
||||
<example xml:id='ex-goDeps'><title>deps.nix</title>
|
||||
<programlisting>
|
||||
[ <co xml:id='ex-goDeps-1' />
|
||||
{
|
||||
goPackagePath = "gopkg.in/yaml.v2"; <co xml:id='ex-goDeps-2' />
|
||||
fetch = {
|
||||
type = "git"; <co xml:id='ex-goDeps-3' />
|
||||
url = "https://gopkg.in/yaml.v2";
|
||||
rev = "a83829b6f1293c91addabc89d0571c246397bbf4";
|
||||
sha256 = "1m4dsmk90sbi17571h6pld44zxz7jc4lrnl4f27dpd1l8g5xvjhh";
|
||||
};
|
||||
}
|
||||
{
|
||||
goPackagePath = "github.com/docopt/docopt-go";
|
||||
fetch = {
|
||||
type = "git";
|
||||
url = "https://github.com/docopt/docopt-go";
|
||||
rev = "784ddc588536785e7299f7272f39101f7faccc3f";
|
||||
sha256 = "0wwz48jl9fvl1iknvn9dqr4gfy1qs03gxaikrxxp9gry6773v3sj";
|
||||
};
|
||||
}
|
||||
]
|
||||
</programlisting>
|
||||
</example>
|
||||
|
||||
<para>
|
||||
|
||||
<calloutlist>
|
||||
|
||||
<callout arearefs='ex-goDeps-1'>
|
||||
<para>
|
||||
<varname>goDeps</varname> is a list of Go dependencies.
|
||||
</para>
|
||||
</callout>
|
||||
|
||||
<callout arearefs='ex-goDeps-2'>
|
||||
<para>
|
||||
<varname>goPackagePath</varname> specifies Go package import path.
|
||||
</para>
|
||||
</callout>
|
||||
|
||||
<callout arearefs='ex-goDeps-3'>
|
||||
<para>
|
||||
<varname>fetch</varname> specifies the fetcher type used to get the package source. If <literal>git</literal>
is used, then <varname>url</varname>, <varname>rev</varname> and <varname>sha256</varname>
must be defined next to it.
|
||||
</para>
|
||||
</callout>
|
||||
|
||||
</calloutlist>
|
||||
|
||||
</para>
|
||||
|
||||
<para>
|
||||
<varname>buildGoPackage</varname> produces <xref linkend='chap-multiple-output' xrefstyle="select: title" />
|
||||
where <varname>bin</varname> includes program binaries. You can test build a Go binary as follows:
|
||||
|
||||
<screen>
|
||||
$ nix-build -A deis.bin
|
||||
</screen>
|
||||
|
||||
or build all outputs with:
|
||||
|
||||
<screen>
|
||||
$ nix-build -A deis.all
|
||||
</screen>
|
||||
|
||||
The <varname>bin</varname> output will be installed by default with <varname>nix-env -i</varname>
or <varname>systemPackages</varname>.
|
||||
|
||||
</para>
|
||||
|
||||
<para>
|
||||
You may use Go packages installed into the active Nix profiles by adding
|
||||
the following to your ~/.bashrc:
|
||||
|
||||
<screen>
|
||||
for p in $NIX_PROFILES; do
|
||||
GOPATH="$p/share/go:$GOPATH"
|
||||
done
|
||||
</screen>
|
||||
</para>
|
||||
|
||||
<para>To extract dependency information from a Go package in an automated way, use <link xlink:href="https://github.com/kamilchm/go2nix">go2nix</link>.
It can produce a complete derivation and <varname>goDeps</varname> file for Go programs.</para>
|
||||
</section>
|
||||
|
||||
@@ -1,825 +0,0 @@
|
||||
---
|
||||
title: User's Guide for Haskell in Nixpkgs
|
||||
author: Peter Simons
|
||||
date: 2015-06-01
|
||||
---
|
||||
# User's Guide to the Haskell Infrastructure
|
||||
|
||||
|
||||
## How to install Haskell packages
|
||||
|
||||
Nixpkgs distributes build instructions for all Haskell packages registered on
|
||||
[Hackage](http://hackage.haskell.org/), but strangely enough normal Nix package
|
||||
lookups don't seem to discover any of them, except for the default version of ghc, cabal-install, and stack:
|
||||
|
||||
$ nix-env -i alex
|
||||
error: selector ‘alex’ matches no derivations
|
||||
$ nix-env -qa ghc
|
||||
ghc-7.10.2
|
||||
|
||||
The Haskell package set is not registered in the top-level namespace because it
|
||||
is *huge*. If all Haskell packages were visible to these commands, then
|
||||
name-based search/install operations would be much slower than they are now. We
|
||||
avoided that by keeping all Haskell-related packages in a separate attribute
|
||||
set called `haskellPackages`, which the following command will list:
|
||||
|
||||
$ nix-env -f "<nixpkgs>" -qaP -A haskellPackages
|
||||
haskellPackages.a50 a50-0.5
|
||||
haskellPackages.abacate haskell-abacate-0.0.0.0
|
||||
haskellPackages.abcBridge haskell-abcBridge-0.12
|
||||
haskellPackages.afv afv-0.1.1
|
||||
haskellPackages.alex alex-3.1.4
|
||||
haskellPackages.Allure Allure-0.4.101.1
|
||||
haskellPackages.alms alms-0.6.7
|
||||
[... some 8000 entries omitted ...]
|
||||
|
||||
To install any of those packages into your profile, refer to them by their
|
||||
attribute path (first column):
|
||||
|
||||
$ nix-env -f "<nixpkgs>" -iA haskellPackages.Allure ...
|
||||
|
||||
The attribute path of any Haskell package corresponds to the name of that
|
||||
particular package on Hackage: the package `cabal-install` has the attribute
|
||||
`haskellPackages.cabal-install`, and so on. (Actually, this convention causes
|
||||
trouble with packages like `3dmodels` and `4Blocks`, because these names are
|
||||
invalid identifiers in the Nix language. The issue of how to deal with these
|
||||
rare corner cases is currently unresolved.)
|
||||
|
||||
Haskell packages whose Nix name (second column) begins with a `haskell-` prefix
|
||||
are packages that provide a library whereas packages without that prefix
|
||||
provide just executables. Libraries may provide executables too, though: the
|
||||
package `haskell-pandoc`, for example, installs both a library and an
|
||||
application. You can install and use Haskell executables just like any other
|
||||
program in Nixpkgs, but using Haskell libraries for development is a bit
|
||||
trickier and we'll address that subject in great detail in section [How to
|
||||
create a development environment].
|
||||
|
||||
Attribute paths are deterministic inside of Nixpkgs, but the path necessary to
|
||||
reach Nixpkgs varies from system to system. We dodged that problem by giving
|
||||
`nix-env` an explicit `-f "<nixpkgs>"` parameter, but if you call `nix-env`
|
||||
without that flag, then chances are the invocation fails:
|
||||
|
||||
$ nix-env -iA haskellPackages.cabal-install
|
||||
error: attribute ‘haskellPackages’ in selection path
|
||||
‘haskellPackages.cabal-install’ not found
|
||||
|
||||
On NixOS, for example, Nixpkgs does *not* exist in the top-level namespace by
|
||||
default. To figure out the proper attribute path, it's easiest to query for the
|
||||
path of a well-known Nixpkgs package, i.e.:
|
||||
|
||||
$ nix-env -qaP coreutils
|
||||
nixos.coreutils coreutils-8.23
|
||||
|
||||
If your system responds like that (most NixOS installations will), then the
|
||||
attribute path to `haskellPackages` is `nixos.haskellPackages`. Thus, if you
|
||||
want to use `nix-env` without giving an explicit `-f` flag, then that's the way
|
||||
to do it:
|
||||
|
||||
$ nix-env -qaP -A nixos.haskellPackages
|
||||
$ nix-env -iA nixos.haskellPackages.cabal-install
|
||||
|
||||
Our current default compiler is GHC 7.10.x and the `haskellPackages` set
|
||||
contains packages built with that particular version. Nixpkgs contains the
|
||||
latest major release of every GHC since 6.10.4, however, and there is a whole
|
||||
family of package sets available that defines Hackage packages built with each
|
||||
of those compilers, too:
|
||||
|
||||
$ nix-env -f "<nixpkgs>" -qaP -A haskell.packages.ghc6123
|
||||
$ nix-env -f "<nixpkgs>" -qaP -A haskell.packages.ghc763
|
||||
|
||||
The name `haskellPackages` is really just a synonym for
|
||||
`haskell.packages.ghc7102`, because we prefer that package set internally and
|
||||
recommend it to our users as their default choice, but ultimately you are free
|
||||
to compile your Haskell packages with any GHC version you please. The following
|
||||
command displays the complete list of available compilers:
|
||||
|
||||
$ nix-env -f "<nixpkgs>" -qaP -A haskell.compiler
|
||||
haskell.compiler.ghc6104 ghc-6.10.4
|
||||
haskell.compiler.ghc6123 ghc-6.12.3
|
||||
haskell.compiler.ghc704 ghc-7.0.4
|
||||
haskell.compiler.ghc722 ghc-7.2.2
|
||||
haskell.compiler.ghc742 ghc-7.4.2
|
||||
haskell.compiler.ghc763 ghc-7.6.3
|
||||
haskell.compiler.ghc784 ghc-7.8.4
|
||||
haskell.compiler.ghc7102 ghc-7.10.2
|
||||
haskell.compiler.ghcHEAD ghc-7.11.20150402
|
||||
haskell.compiler.ghcNokinds ghc-nokinds-7.11.20150704
|
||||
haskell.compiler.ghcjs ghcjs-0.1.0
|
||||
haskell.compiler.jhc jhc-0.8.2
|
||||
haskell.compiler.uhc uhc-1.1.9.0
|
||||
|
||||
We have no package sets for `jhc` or `uhc` yet, unfortunately, but for every
|
||||
version of GHC listed above, there exists a package set based on that compiler.
|
||||
Also, the attributes `haskell.compiler.ghcXYC` and
|
||||
`haskell.packages.ghcXYC.ghc` are synonymous for the sake of convenience.
|
||||
|
||||
## How to create a development environment
|
||||
|
||||
### How to install a compiler
|
||||
|
||||
A simple development environment consists of a Haskell compiler and one or both
|
||||
of the tools `cabal-install` and `stack`. We saw in section
|
||||
[How to install Haskell packages] how you can install those programs into your
|
||||
user profile:
|
||||
|
||||
$ nix-env -f "<nixpkgs>" -iA haskellPackages.ghc haskellPackages.cabal-install
|
||||
|
||||
Instead of the default package set `haskellPackages`, you can also use the more
|
||||
precise name `haskell.compiler.ghc7102`, which has the advantage that it refers
|
||||
to the same GHC version regardless of what Nixpkgs considers "default" at any
|
||||
given time.
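
For instance, the following installs that exact compiler together with
`cabal-install` (a sketch; any attribute from the compiler list shown earlier
works the same way):

    $ nix-env -f "<nixpkgs>" -iA haskell.compiler.ghc7102 haskellPackages.cabal-install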
|
||||
|
||||
Once you've made those tools available in `$PATH`, it's possible to build
|
||||
Hackage packages the same way people without access to Nix do it all the time:
|
||||
|
||||
$ cabal get lens-4.11 && cd lens-4.11
|
||||
$ cabal install -j --dependencies-only
|
||||
$ cabal configure
|
||||
$ cabal build
|
||||
|
||||
If you enjoy working with Cabal sandboxes, then that's entirely possible too:
|
||||
just execute the command
|
||||
|
||||
$ cabal sandbox init
|
||||
|
||||
before installing the required dependencies.
|
||||
|
||||
The `nix-shell` utility makes it easy to switch to a different compiler
|
||||
version; just enter the Nix shell environment with the command
|
||||
|
||||
$ nix-shell -p haskell.compiler.ghc784
|
||||
|
||||
to bring GHC 7.8.4 into `$PATH`. Alternatively, you can use Stack instead of
|
||||
`nix-shell` directly to select compiler versions and other build tools
|
||||
per-project. It uses `nix-shell` under the hood when Nix support is turned on.
|
||||
See [How to build a Haskell project using Stack].
|
||||
|
||||
If you're using `cabal-install`, re-running `cabal configure` inside the spawned
|
||||
shell switches your build to use that compiler instead. If you're working on
|
||||
a project that doesn't depend on any additional system libraries outside of GHC,
|
||||
then it's even sufficient to just run the `cabal configure` command inside of
|
||||
the shell:
|
||||
|
||||
$ nix-shell -p haskell.compiler.ghc784 --command "cabal configure"
|
||||
|
||||
Afterwards, all other commands like `cabal build` work just fine in any shell
|
||||
environment, because the configure phase recorded the absolute paths to all
|
||||
required tools like GHC in its build configuration inside of the `dist/`
|
||||
directory. Please note, however, that `nix-collect-garbage` can break such an
|
||||
environment because the Nix store paths created by `nix-shell` aren't "alive"
|
||||
anymore once `nix-shell` has terminated. If you find that your Haskell builds
|
||||
no longer work after garbage collection, then you'll have to re-run `cabal
|
||||
configure` inside of a new `nix-shell` environment.
|
||||
|
||||
### How to install a compiler with libraries
|
||||
|
||||
GHC expects to find all installed libraries inside of its own `lib` directory.
|
||||
This approach works fine on traditional Unix systems, but it doesn't work for
|
||||
Nix, because GHC's store path is immutable once it's built. We cannot install
|
||||
additional libraries into that location. As a consequence, our copies of GHC
|
||||
don't know any packages except their own core libraries, like `base`,
|
||||
`containers`, `Cabal`, etc.
|
||||
|
||||
We can register additional libraries to GHC, however, using a special build
|
||||
function called `ghcWithPackages`. That function expects one argument: a
|
||||
function that maps from an attribute set of Haskell packages to a list of
|
||||
packages, which determines the libraries known to that particular version of
|
||||
GHC. For example, the Nix expression `ghcWithPackages (pkgs: [pkgs.mtl])`
|
||||
generates a copy of GHC that has the `mtl` library registered in addition to
|
||||
its normal core packages:
|
||||
|
||||
$ nix-shell -p "haskellPackages.ghcWithPackages (pkgs: [pkgs.mtl])"
|
||||
|
||||
[nix-shell:~]$ ghc-pkg list mtl
|
||||
/nix/store/zy79...-ghc-7.10.2/lib/ghc-7.10.2/package.conf.d:
|
||||
mtl-2.2.1
|
||||
|
||||
This function allows users to define their own development environment by means
|
||||
of an override. After adding the following snippet to `~/.nixpkgs/config.nix`,
|
||||
|
||||
{
|
||||
packageOverrides = super: let self = super.pkgs; in
|
||||
{
|
||||
myHaskellEnv = self.haskell.packages.ghc7102.ghcWithPackages
|
||||
(haskellPackages: with haskellPackages; [
|
||||
# libraries
|
||||
arrows async cgi criterion
|
||||
# tools
|
||||
cabal-install haskintex
|
||||
]);
|
||||
};
|
||||
}
|
||||
|
||||
it's possible to install that compiler with `nix-env -f "<nixpkgs>" -iA
|
||||
myHaskellEnv`. If you'd like to switch that development environment to a
|
||||
different version of GHC, just replace the `ghc7102` bit in the previous
|
||||
definition with the appropriate name. Of course, it's also possible to define
|
||||
any number of these development environments! (You can't install two of them
|
||||
into the same profile at the same time, though, because that would result in
|
||||
file conflicts.)
|
||||
|
||||
The generated `ghc` program is a wrapper script that re-directs the real
|
||||
GHC executable to use a new `lib` directory --- one that we specifically
|
||||
constructed to contain all those packages the user requested:
|
||||
|
||||
$ cat $(type -p ghc)
|
||||
#! /nix/store/xlxj...-bash-4.3-p33/bin/bash -e
|
||||
export NIX_GHC=/nix/store/19sm...-ghc-7.10.2/bin/ghc
|
||||
export NIX_GHCPKG=/nix/store/19sm...-ghc-7.10.2/bin/ghc-pkg
|
||||
export NIX_GHC_DOCDIR=/nix/store/19sm...-ghc-7.10.2/share/doc/ghc/html
|
||||
export NIX_GHC_LIBDIR=/nix/store/19sm...-ghc-7.10.2/lib/ghc-7.10.2
|
||||
exec /nix/store/j50p...-ghc-7.10.2/bin/ghc "-B$NIX_GHC_LIBDIR" "$@"
|
||||
|
||||
The variables `$NIX_GHC`, `$NIX_GHCPKG`, etc. point to the *new* store path
|
||||
`ghcWithPackages` constructed specifically for this environment. The last line
|
||||
of the wrapper script then executes the real `ghc`, but passes the path to the
|
||||
new `lib` directory using GHC's `-B` flag.
|
||||
|
||||
The purpose of those environment variables is to work around an impurity in the
|
||||
popular [ghc-paths](http://hackage.haskell.org/package/ghc-paths) library. That
|
||||
library promises to give its users access to GHC's installation paths. Only,
|
||||
the library can't possibly know that path when it's compiled, because the path
|
||||
GHC considers its own is determined only much later, when the user configures
|
||||
it through `ghcWithPackages`. So we [patched
|
||||
ghc-paths](https://github.com/NixOS/nixpkgs/blob/master/pkgs/development/haskell-modules/patches/ghc-paths-nix.patch)
|
||||
to return the paths found in those environment variables at run-time rather
|
||||
than trying to guess them at compile-time.
|
||||
|
||||
To make sure that mechanism works properly all the time, we recommend that you
|
||||
set those variables to meaningful values in your shell environment, too, i.e.
|
||||
by adding the following code to your `~/.bashrc`:
|
||||
|
||||
if type >/dev/null 2>&1 -p ghc; then
|
||||
eval "$(egrep ^export "$(type -p ghc)")"
|
||||
fi
|
||||
|
||||
If you are certain that you'll use only one GHC environment which is located in
|
||||
your user profile, then you can use the following code, too, which has the
|
||||
advantage that it doesn't contain any paths from the Nix store, i.e. those
|
||||
settings always remain valid even if a `nix-env -u` operation updates the GHC
|
||||
environment in your profile:
|
||||
|
||||
if [ -e ~/.nix-profile/bin/ghc ]; then
|
||||
export NIX_GHC="$HOME/.nix-profile/bin/ghc"
|
||||
export NIX_GHCPKG="$HOME/.nix-profile/bin/ghc-pkg"
|
||||
export NIX_GHC_DOCDIR="$HOME/.nix-profile/share/doc/ghc/html"
|
||||
export NIX_GHC_LIBDIR="$HOME/.nix-profile/lib/ghc-$($NIX_GHC --numeric-version)"
|
||||
fi
|
||||
|
||||
### How to install a compiler with libraries, hoogle and documentation indexes
|
||||
|
||||
If you plan to use your environment for interactive programming, not just
|
||||
compiling random Haskell code, you might want to replace `ghcWithPackages` in
|
||||
all the listings above with `ghcWithHoogle`.
|
||||
|
||||
This environment generator not only produces an environment with GHC and all
|
||||
the specified libraries, but also generates `hoogle` and `haddock` indexes
for all the packages, and provides a wrapper script around the `hoogle` binary that
|
||||
uses all those things. A precise name for this thing would be
|
||||
"`ghcWithPackagesAndHoogleAndDocumentationIndexes`", which is, regrettably, too
|
||||
long and scary.
|
||||
|
||||
For example, installing the following environment
|
||||
|
||||
{
|
||||
packageOverrides = super: let self = super.pkgs; in
|
||||
{
|
||||
myHaskellEnv = self.haskellPackages.ghcWithHoogle
|
||||
(haskellPackages: with haskellPackages; [
|
||||
# libraries
|
||||
arrows async cgi criterion
|
||||
# tools
|
||||
cabal-install haskintex
|
||||
]);
|
||||
};
|
||||
}
|
||||
|
||||
allows one to browse a module documentation index [not too dissimilar to
|
||||
this](https://downloads.haskell.org/~ghc/latest/docs/html/libraries/index.html)
|
||||
for all the specified packages and their dependencies by directing a browser of
|
||||
choice to `~/.nix-profile/share/doc/hoogle/index.html` (or
|
||||
`/run/current-system/sw/share/doc/hoogle/index.html` in case you put it in
|
||||
`environment.systemPackages` in NixOS).
|
||||
|
||||
After you've marveled enough at that, try adding the following to your
|
||||
`~/.ghc/ghci.conf`
|
||||
|
||||
:def hoogle \s -> return $ ":! hoogle search -cl --count=15 \"" ++ s ++ "\""
|
||||
:def doc \s -> return $ ":! hoogle search -cl --info \"" ++ s ++ "\""
|
||||
|
||||
and test it by typing into `ghci`:
|
||||
|
||||
:hoogle a -> a
|
||||
:doc a -> a
|
||||
|
||||
Be sure to note the links to `haddock` files in the output. With any modern and
|
||||
properly configured terminal emulator you can just click those links to
|
||||
navigate there.
|
||||
|
||||
Finally, you can run
|
||||
|
||||
hoogle server -p 8080
|
||||
|
||||
and navigate to http://localhost:8080/ for your own local
|
||||
[Hoogle](https://www.haskell.org/hoogle/). Note, however, that Firefox and
|
||||
possibly other browsers disallow navigation from `http:` to `file:` URIs for
|
||||
security reasons, which might be quite an inconvenience. See [this
|
||||
page](http://kb.mozillazine.org/Links_to_local_pages_do_not_work) for
|
||||
workarounds.
|
||||
|
||||
### How to build a Haskell project using Stack
|
||||
|
||||
[Stack](http://haskellstack.org) is a popular build tool for Haskell projects.
|
||||
It has first-class support for Nix. Stack can optionally use Nix to
|
||||
automatically select the right version of GHC and other build tools to build,
|
||||
test and execute apps in an existing project downloaded from somewhere on the
|
||||
Internet. Pass the `--nix` flag to any `stack` command to do so, e.g.
|
||||
|
||||
$ git clone --recursive http://github.com/yesodweb/wai
|
||||
$ cd wai
|
||||
$ stack --nix build
|
||||
|
||||
If you want `stack` to use Nix by default, you can add a `nix` section to the
|
||||
`stack.yaml` file, as explained in the [Stack documentation][stack-nix-doc]. For
|
||||
example:
|
||||
|
||||
nix:
|
||||
enable: true
|
||||
packages: [pkgconfig zeromq zlib]
|
||||
|
||||
The example configuration snippet above tells Stack to create an ad hoc
|
||||
environment for `nix-shell` as in the below section, in which the `pkgconfig`,
|
||||
`zeromq` and `zlib` packages from Nixpkgs are available. All `stack` commands
|
||||
will implicitly be executed inside this ad hoc environment.
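
With that section in place, plain `stack` commands use the Nix-provided
environment automatically, without the `--nix` flag, e.g.:

    $ stack build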
|
||||
|
||||
Some projects have more sophisticated needs. For example, some ad hoc
|
||||
environments might need to expose Nixpkgs packages compiled in a certain way, or
|
||||
with extra environment variables. In these cases, you'll need a `shell-file` field
|
||||
instead of `packages`:
|
||||
|
||||
nix:
|
||||
enable: true
|
||||
shell-file: shell.nix
|
||||
|
||||
For more on how to write a `shell.nix` file see the below section. You'll need
|
||||
to express a derivation. Note that Nixpkgs ships with a convenience wrapper
|
||||
function around `mkDerivation` called `haskell.lib.buildStackProject` to help you
|
||||
create this derivation in exactly the way Stack expects. All of the same inputs
|
||||
as `mkDerivation` can be provided. For example, to build a Stack project that
|
||||
includes packages that link against a version of the R library compiled with
|
||||
special options turned on:
|
||||
|
||||
with (import <nixpkgs> { });
|
||||
|
||||
let R = pkgs.R.override { enableStrictBarrier = true; };
|
||||
in
|
||||
haskell.lib.buildStackProject {
|
||||
name = "HaskellR";
|
||||
buildInputs = [ R zeromq zlib ];
|
||||
}
|
||||
|
||||
You can select a particular GHC version to compile with by setting the
|
||||
`ghc` attribute as an argument to `buildStackProject`. Better yet, let
|
||||
Stack choose what GHC version it wants based on the snapshot specified
|
||||
in `stack.yaml` (only works with Stack >= 1.1.3):
|
||||
|
||||
{nixpkgs ? import <nixpkgs> { }, ghc ? nixpkgs.ghc}:
|
||||
|
||||
with nixpkgs;
|
||||
|
||||
let R = pkgs.R.override { enableStrictBarrier = true; };
|
||||
in
|
||||
haskell.lib.buildStackProject {
|
||||
name = "HaskellR";
|
||||
buildInputs = [ R zeromq zlib ];
|
||||
inherit ghc;
|
||||
}
|
||||
|
||||
[stack-nix-doc]: http://docs.haskellstack.org/en/stable/nix_integration.html
|
||||
|
||||
### How to create ad hoc environments for `nix-shell`
|
||||
|
||||
The easiest way to create an ad hoc development environment is to run
|
||||
`nix-shell` with the appropriate GHC environment given on the command-line:
|
||||
|
||||
nix-shell -p "haskellPackages.ghcWithPackages (pkgs: with pkgs; [mtl pandoc])"
|
||||
|
||||
For more sophisticated use-cases, however, it's more convenient to save the
|
||||
desired configuration in a file called `shell.nix` that looks like this:
|
||||
|
||||
{ nixpkgs ? import <nixpkgs> {}, compiler ? "ghc7102" }:
|
||||
let
|
||||
inherit (nixpkgs) pkgs;
|
||||
ghc = pkgs.haskell.packages.${compiler}.ghcWithPackages (ps: with ps; [
|
||||
monad-par mtl
|
||||
]);
|
||||
in
|
||||
pkgs.stdenv.mkDerivation {
|
||||
name = "my-haskell-env-0";
|
||||
buildInputs = [ ghc ];
|
||||
shellHook = "eval $(egrep ^export ${ghc}/bin/ghc)";
|
||||
}
|
||||
|
||||
Now run `nix-shell` --- or even `nix-shell --pure` --- to enter a shell
|
||||
environment that has the appropriate compiler in `$PATH`. If you use `--pure`,
|
||||
then add all other packages that your development environment needs into the
|
||||
`buildInputs` attribute. If you'd like to switch to a different compiler
|
||||
version, then pass an appropriate `compiler` argument to the expression, i.e.
|
||||
`nix-shell --argstr compiler ghc784`.
|
||||
|
||||
If you need such an environment because you'd like to compile a Hackage package
|
||||
outside of Nix --- i.e. because you're hacking on the latest version from Git
|
||||
---, then the package set provides suitable nix-shell environments for you
|
||||
already! Every Haskell package has an `env` attribute that provides a shell
|
||||
environment suitable for compiling that particular package. If you'd like to
|
||||
hack the `lens` library, for example, then you just have to check out the
|
||||
source code and enter the appropriate environment:
|
||||
|
||||
$ cabal get lens-4.11 && cd lens-4.11
|
||||
Downloading lens-4.11...
|
||||
Unpacking to lens-4.11/
|
||||
|
||||
$ nix-shell "<nixpkgs>" -A haskellPackages.lens.env
|
||||
[nix-shell:/tmp/lens-4.11]$
|
||||
|
||||
At this point, you can run `cabal configure`, `cabal build`, and all the other
|
||||
development commands. Note that you need `cabal-install` installed in your
|
||||
`$PATH` already to use it here --- the `nix-shell` environment does not provide
|
||||
it.
|
||||
|
||||
## How to create Nix builds for your own private Haskell packages
|
||||
|
||||
If your own Haskell packages have build instructions for Cabal, then you can
|
||||
convert those automatically into build instructions for Nix using the
|
||||
`cabal2nix` utility, which you can install into your profile by running
|
||||
`nix-env -i cabal2nix`.
|
||||
|
||||
### How to build a stand-alone project
|
||||
|
||||
For example, let's assume that you're working on a private project called
|
||||
`foo`. To generate a Nix build expression for it, change into the project's
|
||||
top-level directory and run the command:
|
||||
|
||||
$ cabal2nix . >foo.nix
|
||||
|
||||
Then write the following snippet into a file called `default.nix`:
|
||||
|
||||
{ nixpkgs ? import <nixpkgs> {}, compiler ? "ghc7102" }:
|
||||
nixpkgs.pkgs.haskell.packages.${compiler}.callPackage ./foo.nix { }
|
||||
|
||||
Finally, store the following code in a file called `shell.nix`:
|
||||
|
||||
{ nixpkgs ? import <nixpkgs> {}, compiler ? "ghc7102" }:
|
||||
(import ./default.nix { inherit nixpkgs compiler; }).env
|
||||
|
||||
At this point, you can run `nix-build` to have Nix compile your project and
|
||||
install it into a Nix store path. After `nix-build` returns, the local directory
will contain a symlink called `result` that points to that store path. Of
|
||||
course, passing the flag `--argstr compiler ghc763` allows switching the build
|
||||
to any version of GHC currently supported.
|
||||
|
||||
Furthermore, you can call `nix-shell` to enter an interactive development
|
||||
environment in which you can use `cabal configure` and `cabal build` to develop
|
||||
your code. That environment will automatically contain a proper GHC derivation
|
||||
with all the required libraries registered as well as all the system-level
|
||||
libraries your package might need.
|
||||
|
||||
If your package does not depend on any system-level libraries, then it's
|
||||
sufficient to run
|
||||
|
||||
$ nix-shell --command "cabal configure"
|
||||
|
||||
once to set up your build. `cabal-install` determines the absolute paths to all
|
||||
resources required for the build and writes them into a config file in the
|
||||
`dist/` directory. Once that's done, you can run `cabal build` and any other
|
||||
command for that project even outside of the `nix-shell` environment. This
|
||||
feature is particularly nice for those of us who like to edit their code with
|
||||
an IDE, like Emacs' `haskell-mode`, because it's not necessary to start Emacs
|
||||
inside of nix-shell just to make it find out the necessary settings for
|
||||
building the project; `cabal-install` has already done that for us.
|
||||
|
||||
If you want to do some quick-and-dirty hacking and don't want to bother setting
|
||||
up a `default.nix` and `shell.nix` file manually, then you can use the
|
||||
`--shell` flag offered by `cabal2nix` to have it generate a stand-alone
|
||||
`nix-shell` environment for you. With that feature, running
|
||||
|
||||
$ cabal2nix --shell . >shell.nix
|
||||
$ nix-shell --command "cabal configure"
|
||||
|
||||
is usually enough to set up a build environment for any given Haskell package.
|
||||
You can even use that generated file to run `nix-build`, too:
|
||||
|
||||
$ nix-build shell.nix
|
||||
|
||||
### How to build projects that depend on each other
|
||||
|
||||
If you have multiple private Haskell packages that depend on each other, then
|
||||
you'll have to register those packages in the Nixpkgs set to make them visible
|
||||
for the dependency resolution performed by `callPackage`. First of all, change
|
||||
into each of your projects' top-level directories and generate a `default.nix`
|
||||
file with `cabal2nix`:
|
||||
|
||||
$ cd ~/src/foo && cabal2nix . >default.nix
|
||||
$ cd ~/src/bar && cabal2nix . >default.nix
|
||||
|
||||
Then edit your `~/.nixpkgs/config.nix` file to register those builds in the
|
||||
default Haskell package set:
|
||||
|
||||
{
|
||||
packageOverrides = super: let self = super.pkgs; in
|
||||
{
|
||||
haskellPackages = super.haskellPackages.override {
|
||||
overrides = self: super: {
|
||||
foo = self.callPackage ../src/foo {};
|
||||
bar = self.callPackage ../src/bar {};
|
||||
};
|
||||
};
|
||||
};
|
||||
}
|
||||
|
||||
Once that's accomplished, `nix-env -f "<nixpkgs>" -qA haskellPackages` will
|
||||
show your packages like any other package from Hackage, and you can build them
|
||||
|
||||
$ nix-build "<nixpkgs>" -A haskellPackages.foo
|
||||
|
||||
or enter an interactive shell environment suitable for building them:
|
||||
|
||||
$ nix-shell "<nixpkgs>" -A haskellPackages.bar.env
|
||||
|
||||
## Miscellaneous Topics
|
||||
|
||||
### How to build with profiling enabled
|
||||
|
||||
Every Haskell package set takes a function called `overrides` that you can use
|
||||
to manipulate the package as much as you please. One useful application of this
|
||||
feature is to replace the default `mkDerivation` function with one that enables
|
||||
library profiling for all packages. To accomplish that, add the
following snippet to your `~/.nixpkgs/config.nix` file:
|
||||
|
||||
{
|
||||
packageOverrides = super: let self = super.pkgs; in
|
||||
{
|
||||
profiledHaskellPackages = self.haskellPackages.override {
|
||||
overrides = self: super: {
|
||||
mkDerivation = args: super.mkDerivation (args // {
|
||||
enableLibraryProfiling = true;
|
||||
});
|
||||
};
|
||||
};
|
||||
};
|
||||
}
|
||||
|
||||
Then, replace instances of `haskellPackages` in the `cabal2nix`-generated
|
||||
`default.nix` or `shell.nix` files with `profiledHaskellPackages`.
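
For instance, the stand-alone project's `default.nix` from the earlier section
would then look roughly like this (a sketch; `foo.nix` is the hypothetical
`cabal2nix` output from that section):

    { nixpkgs ? import <nixpkgs> {} }:
    nixpkgs.pkgs.profiledHaskellPackages.callPackage ./foo.nix { }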
|
||||
|
||||
### How to override package versions in a compiler-specific package set
|
||||
|
||||
Nixpkgs provides the latest version of
|
||||
[`ghc-events`](http://hackage.haskell.org/package/ghc-events), which is 0.4.4.0
|
||||
at the time of this writing. This is fine for users of GHC 7.10.x, but GHC
|
||||
7.8.4 cannot compile that binary. Now, one way to solve that problem is to
|
||||
register an older version of `ghc-events` in the 7.8.x-specific package set.
|
||||
The first step is to generate Nix build instructions with `cabal2nix`:
|
||||
|
||||
$ cabal2nix cabal://ghc-events-0.4.3.0 >~/.nixpkgs/ghc-events-0.4.3.0.nix
|
||||
|
||||
Then add the override in `~/.nixpkgs/config.nix`:
|
||||
|
||||
{
|
||||
packageOverrides = super: let self = super.pkgs; in
|
||||
{
|
||||
haskell = super.haskell // {
|
||||
packages = super.haskell.packages // {
|
||||
ghc784 = super.haskell.packages.ghc784.override {
|
||||
overrides = self: super: {
|
||||
ghc-events = self.callPackage ./ghc-events-0.4.3.0.nix {};
|
||||
};
|
||||
};
|
||||
};
|
||||
};
|
||||
};
|
||||
}
|
||||
|
||||
This code is a little crazy, no doubt, but it's necessary because the intuitive
|
||||
version
|
||||
|
||||
haskell.packages.ghc784 = super.haskell.packages.ghc784.override {
|
||||
overrides = self: super: {
|
||||
ghc-events = self.callPackage ./ghc-events-0.4.3.0.nix {};
|
||||
};
|
||||
};
|
||||
|
||||
doesn't do what we want it to: that code replaces the `haskell` package set in
|
||||
Nixpkgs with one that contains only one entry, `packages`, which contains only
|
||||
one entry `ghc784`. This override loses the `haskell.compiler` set, and it
|
||||
loses the `haskell.packages.ghcXYZ` sets for all compilers but GHC 7.8.4. To
|
||||
avoid that problem, we have to perform the convoluted little dance from above,
|
||||
iterating over each level in the hierarchy.
|
||||
|
||||
Once it's accomplished, however, we can install a variant of `ghc-events`
|
||||
that's compiled with GHC 7.8.4:
|
||||
|
||||
nix-env -f "<nixpkgs>" -iA haskell.packages.ghc784.ghc-events
|
||||
|
||||
Unfortunately, it turns out that this build fails again while executing the
|
||||
test suite! Apparently, the release archive on Hackage is missing some data
|
||||
files that the test suite requires, so we cannot run it. We can skip the test
suite by re-generating the Nix expression with the `--no-check` flag:
|
||||
|
||||
$ cabal2nix --no-check cabal://ghc-events-0.4.3.0 >~/.nixpkgs/ghc-events-0.4.3.0.nix
|
||||
|
||||
Now the build succeeds.
|
||||
|
||||
Of course, in the concrete example of `ghc-events` this whole exercise is not
|
||||
an ideal solution, because `ghc-events` can analyze the output emitted by any
|
||||
version of GHC later than 6.12 regardless of the compiler version that was used
|
||||
to build the `ghc-events` executable, so strictly speaking there's no reason to
|
||||
prefer one built with GHC 7.8.x in the first place. However, for users who
|
||||
cannot use GHC 7.10.x at all for some reason, the approach of downgrading to an
|
||||
older version might be useful.
|
||||
|
||||
### How to recover from GHC's infamous non-deterministic library ID bug
|
||||
|
||||
GHC and distributed build farms don't get along well:
|
||||
|
||||
https://ghc.haskell.org/trac/ghc/ticket/4012
|
||||
|
||||
When you see an error like this one
|
||||
|
||||
package foo-0.7.1.0 is broken due to missing package
|
||||
text-1.2.0.4-98506efb1b9ada233bb5c2b2db516d91
|
||||
|
||||
then you have to download and re-install `foo` and all its dependents from
|
||||
scratch:
|
||||
|
||||
# nix-store -q --referrers /nix/store/*-haskell-text-1.2.0.4 \
|
||||
| xargs -L 1 nix-store --repair-path
|
||||
|
||||
If you're using additional Hydra servers other than `hydra.nixos.org`, then it
|
||||
might be necessary to purge the local caches that store data from those
|
||||
machines to disable these binary channels for the duration of the previous
|
||||
command, i.e. by running:
|
||||
|
||||
rm /nix/var/nix/binary-cache-v3.sqlite
|
||||
rm /nix/var/nix/manifests/*
|
||||
rm /nix/var/nix/channel-cache/*
|
||||
|
||||
### How to use the Haste Haskell-to-Javascript transpiler
|
||||
|
||||
Open a shell with `haste-compiler` and `haste-cabal-install` (you don't actually need
|
||||
`node`, but it can be useful to test stuff):
|
||||
|
||||
$ nix-shell -p "haskellPackages.ghcWithPackages (self: with self; [haste-cabal-install haste-compiler])" -p nodejs
|
||||
|
||||
You may not need the following step, but if `haste-boot` fails to compile all the
|
||||
packages it needs, this might do the trick
|
||||
|
||||
$ haste-cabal update
|
||||
|
||||
`haste-boot` builds a set of core libraries so that they can be used from Javascript
|
||||
transpiled programs:
|
||||
|
||||
$ haste-boot
|
||||
|
||||
Transpile and run a "Hello world" program:
|
||||
|
||||
$ echo 'module Main where main = putStrLn "Hello world"' > hello-world.hs
|
||||
$ hastec --onexec hello-world.hs
|
||||
$ node hello-world.js
|
||||
Hello world
|
||||
|
||||
### Builds on Darwin fail with `math.h` not found
|
||||
|
||||
Users of GHC on Darwin have occasionally reported that builds fail, because the
|
||||
compiler complains about a missing include file:
|
||||
|
||||
fatal error: 'math.h' file not found
|
||||
|
||||
The issue has been discussed at length in [ticket
|
||||
6390](https://github.com/NixOS/nixpkgs/issues/6390), and so far no good
|
||||
solution has been proposed. As a work-around, users who run into this problem
|
||||
can configure the environment variables
|
||||
|
||||
export NIX_CFLAGS_COMPILE="-idirafter /usr/include"
|
||||
export NIX_CFLAGS_LINK="-L/usr/lib"
|
||||
|
||||
in their `~/.bashrc` file to avoid the compiler error.
|
||||
|
||||
### Builds using Stack complain about missing system libraries
|
||||
|
||||
-- While building package zlib-0.5.4.2 using:
|
||||
runhaskell -package=Cabal-1.22.4.0 -clear-package-db [... lots of flags ...]
|
||||
Process exited with code: ExitFailure 1
|
||||
Logs have been written to: /home/foo/src/stack-ide/.stack-work/logs/zlib-0.5.4.2.log
|
||||
|
||||
Configuring zlib-0.5.4.2...
|
||||
Setup.hs: Missing dependency on a foreign library:
|
||||
* Missing (or bad) header file: zlib.h
|
||||
This problem can usually be solved by installing the system package that
|
||||
provides this library (you may need the "-dev" version). If the library is
|
||||
already installed but in a non-standard location then you can use the flags
|
||||
--extra-include-dirs= and --extra-lib-dirs= to specify where it is.
|
||||
If the header file does exist, it may contain errors that are caught by the C
|
||||
compiler at the preprocessing stage. In this case you can re-run configure
|
||||
with the verbosity flag -v3 to see the error messages.
|
||||
|
||||
When you run the build inside of the nix-shell environment, the system
|
||||
is configured to find libz.so without any special flags -- the compiler
|
||||
and linker "just know" how to find it. Consequently, Cabal won't record
|
||||
any search paths for libz.so in the package description, which means
|
||||
that the package works fine inside of nix-shell, but once you leave the
|
||||
shell the shared object can no longer be found. That issue is by no
|
||||
means specific to Stack: you'll have that problem with any other
|
||||
Haskell package that's built inside of nix-shell but run outside of that
|
||||
environment.
|
||||
|
||||
You can remedy this issue in several ways. The easiest is to add a `nix` section
|
||||
to the `stack.yaml` like the following:
|
||||
|
||||
nix:
|
||||
enable: true
|
||||
packages: [ zlib ]
|
||||
|
||||
Stack's Nix support knows to add `${zlib.out}/lib` and `${zlib.dev}/include`
as `--extra-lib-dirs` and `--extra-include-dirs` options, respectively. Alternatively, you
|
||||
can achieve the same effect by hand. First of all, run
|
||||
|
||||
$ nix-build --no-out-link "<nixpkgs>" -A zlib
|
||||
/nix/store/alsvwzkiw4b7ip38l4nlfjijdvg3fvzn-zlib-1.2.8
|
||||
|
||||
to find out the store path of the system's zlib library. Now, you can
|
||||
|
||||
1) add that path (plus a "/lib" suffix) to your $LD_LIBRARY_PATH
|
||||
environment variable to make sure your system linker finds libz.so
|
||||
automatically. It's not a pretty solution, but it will work.
|
||||
|
||||
2) As a variant of (1), you can also install any number of system
|
||||
libraries into your user's profile (or some other profile) and point
|
||||
$LD_LIBRARY_PATH to that profile instead, so that you don't have to
|
||||
list dozens of those store paths all over the place.
|
||||
|
||||
3) The solution I prefer is to call stack with an appropriate
|
||||
--extra-lib-dirs flag like so:
|
||||
|
||||
$ stack --extra-lib-dirs=/nix/store/alsvwzkiw4b7ip38l4nlfjijdvg3fvzn-zlib-1.2.8/lib build
|
||||
|
||||
Typically, you'll need --extra-include-dirs as well. It's possible
|
||||
to add those flags to the project's "stack.yaml" or your user's
|
||||
global "~/.stack/global/stack.yaml" file so that you don't have to
|
||||
specify them manually every time. But again, you're likely better off using
|
||||
Stack's Nix support instead.
|
||||
|
||||
The same thing applies to `cabal configure`, of course, if you're
|
||||
building with `cabal-install` instead of Stack.
|
||||
|
||||
### Creating statically linked binaries
|
||||
|
||||
There are two levels of static linking. The first option is to configure the
|
||||
build with the Cabal flag `--disable-executable-dynamic`. In Nix expressions,
|
||||
this can be achieved by setting the attribute:
|
||||
|
||||
enableSharedExecutables = false;
|
||||
|
||||
That gives you a binary with statically linked Haskell libraries and
|
||||
dynamically linked system libraries.
|
||||
|
||||
To link both Haskell libraries and system libraries statically, the additional
|
||||
flags `--ghc-option=-optl=-static --ghc-option=-optl=-pthread` need to be used.
|
||||
In Nix, this is accomplished with:
|
||||
|
||||
configureFlags = [ "--ghc-option=-optl=-static" "--ghc-option=-optl=-pthread" ];
|
||||
|
||||
It's important to realize, however, that most system libraries in Nix are built
|
||||
as shared libraries only, i.e. there is just no static library available that
|
||||
Cabal could link!
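
As a sketch, the first option can be applied to a whole package set using the
same `mkDerivation`-override pattern shown in the profiling section above (the
attribute name `staticHaskellPackages` is just an example):

    {
      packageOverrides = super: let self = super.pkgs; in
        {
          staticHaskellPackages = self.haskellPackages.override {
            overrides = self: super: {
              mkDerivation = args: super.mkDerivation (args // {
                enableSharedExecutables = false;
              });
            };
          };
        };
    }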
|
||||
|
||||
|
||||
## Other resources
|
||||
|
||||
- The Youtube video [Nix Loves Haskell](https://www.youtube.com/watch?v=BsBhi_r-OeE)
|
||||
provides an introduction into Haskell NG aimed at beginners. The slides are
|
||||
available at http://cryp.to/nixos-meetup-3-slides.pdf and also -- in a form
|
||||
ready for cut & paste -- at
|
||||
https://github.com/NixOS/cabal2nix/blob/master/doc/nixos-meetup-3-slides.md.
|
||||
|
||||
- Another Youtube video is [Escaping Cabal Hell with Nix](https://www.youtube.com/watch?v=mQd3s57n_2Y),
|
||||
which discusses the subject of Haskell development with Nix but also provides
|
||||
a basic introduction to Nix as well, i.e. it's suitable for viewers with
|
||||
almost no prior Nix experience.
|
||||
|
||||
- Oliver Charles wrote a very nice [Tutorial how to develop Haskell packages with Nix](http://wiki.ocharles.org.uk/Nix).
|
||||
|
||||
- The *Journey into the Haskell NG infrastructure* series of postings
|
||||
describe the new Haskell infrastructure in great detail:
|
||||
|
||||
- [Part 1](http://lists.science.uu.nl/pipermail/nix-dev/2015-January/015591.html)
|
||||
explains the differences between the old and the new code and gives
|
||||
instructions how to migrate to the new setup.
|
||||
|
||||
- [Part 2](http://lists.science.uu.nl/pipermail/nix-dev/2015-January/015608.html)
|
||||
looks in-depth at how to tweak and configure your setup by means of
|
||||
overrides.
|
||||
|
||||
- [Part 3](http://lists.science.uu.nl/pipermail/nix-dev/2015-April/016912.html)
|
||||
describes the infrastructure that keeps the Haskell package set in Nixpkgs
|
||||
up-to-date.
|
||||
@@ -1,33 +0,0 @@
|
||||
<chapter xmlns="http://docbook.org/ns/docbook"
|
||||
xmlns:xi="http://www.w3.org/2001/XInclude"
|
||||
xml:id="chap-language-support">
|
||||
|
||||
<title>Support for specific programming languages and frameworks</title>
|
||||
|
||||
<para>The <link linkend="chap-stdenv">standard build
|
||||
environment</link> makes it easy to build typical Autotools-based
|
||||
packages with very little code. Any other kind of package can be
|
||||
accommodated by overriding the appropriate phases of
|
||||
<literal>stdenv</literal>. However, there are specialised functions
|
||||
in Nixpkgs to easily build packages for other programming languages,
|
||||
such as Perl or Haskell. These are described in this chapter.</para>
|
||||
|
||||
|
||||
<xi:include href="beam.xml" />
|
||||
<xi:include href="bower.xml" />
|
||||
<xi:include href="coq.xml" />
|
||||
<xi:include href="go.xml" />
|
||||
<xi:include href="haskell.xml" />
|
||||
<xi:include href="idris.xml" /> <!-- generated from ../../pkgs/development/idris-modules/README.md -->
|
||||
<xi:include href="java.xml" />
|
||||
<xi:include href="lua.xml" />
|
||||
<xi:include href="node.xml" /> <!-- generated from ../../pkgs/development/node-packages/README.md -->
|
||||
<xi:include href="perl.xml" />
|
||||
<xi:include href="python.xml" />
|
||||
<xi:include href="qt.xml" />
|
||||
<xi:include href="r.xml" /> <!-- generated from ../../pkgs/development/r-modules/README.md -->
|
||||
<xi:include href="ruby.xml" />
|
||||
<xi:include href="texlive.xml" />
|
||||
|
||||
|
||||
</chapter>
|
||||
@@ -1,84 +0,0 @@
|
||||
<section xmlns="http://docbook.org/ns/docbook"
|
||||
xmlns:xlink="http://www.w3.org/1999/xlink"
|
||||
xml:id="sec-language-java">
|
||||
|
||||
<title>Java</title>
|
||||
|
||||
<para>Ant-based Java packages are typically built from source as follows:
|
||||
|
||||
<programlisting>
|
||||
stdenv.mkDerivation {
|
||||
name = "...";
|
||||
src = fetchurl { ... };
|
||||
|
||||
buildInputs = [ jdk ant ];
|
||||
|
||||
buildPhase = "ant";
|
||||
}
|
||||
</programlisting>
|
||||
|
||||
Note that <varname>jdk</varname> is an alias for the OpenJDK.</para>
|
||||
|
||||
<para>JAR files that are intended to be used by other packages should
|
||||
be installed in <filename>$out/share/java</filename>. The OpenJDK has
|
||||
a stdenv setup hook that adds any JARs in the
|
||||
<filename>share/java</filename> directories of the build inputs to the
|
||||
<envar>CLASSPATH</envar> environment variable. For instance, if the
|
||||
package <literal>libfoo</literal> installs a JAR named
|
||||
<filename>foo.jar</filename> in its <filename>share/java</filename>
|
||||
directory, and another package declares the attribute
|
||||
|
||||
<programlisting>
|
||||
buildInputs = [ jdk libfoo ];
|
||||
</programlisting>
|
||||
|
||||
then <envar>CLASSPATH</envar> will be set to
|
||||
<filename>/nix/store/...-libfoo/share/java/foo.jar</filename>.</para>
|
||||
|
||||
<para>Private JARs
|
||||
should be installed in a location like
|
||||
<filename>$out/share/<replaceable>package-name</replaceable></filename>.</para>
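<para>For instance, a hypothetical package <literal>foo</literal> that ships
both a public and a private JAR could use an install phase along these lines
(a sketch):

<programlisting>
installPhase =
  ''
    mkdir -p $out/share/java $out/share/foo
    cp foo.jar $out/share/java/
    cp foo-internal.jar $out/share/foo/
  '';
</programlisting>
</para>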
|
||||
|
||||
<para>If your Java package provides a program, you need to generate a
|
||||
wrapper script to run it using the OpenJRE. You can use
|
||||
<literal>makeWrapper</literal> for this:
|
||||
|
||||
<programlisting>
|
||||
buildInputs = [ makeWrapper ];
|
||||
|
||||
installPhase =
|
||||
''
|
||||
mkdir -p $out/bin
|
||||
makeWrapper ${jre}/bin/java $out/bin/foo \
|
||||
--add-flags "-cp $out/share/java/foo.jar org.foo.Main"
|
||||
'';
|
||||
</programlisting>
|
||||
|
||||
Note the use of <literal>jre</literal>, which is the part of the
|
||||
OpenJDK package that contains the Java Runtime Environment. By using
|
||||
<literal>${jre}/bin/java</literal> instead of
|
||||
<literal>${jdk}/bin/java</literal>, you prevent your package from
|
||||
depending on the JDK at runtime.</para>
|
||||
|
||||
<para>It is possible to use a different Java compiler than
|
||||
<command>javac</command> from the OpenJDK. For instance, to use the
|
||||
Eclipse Java Compiler:
|
||||
|
||||
<programlisting>
|
||||
buildInputs = [ jre ant ecj ];
|
||||
</programlisting>
|
||||
|
||||
(Note that here you don’t need the full JDK as an input, but just the
|
||||
JRE.) The ECJ has a stdenv setup hook that sets some environment
|
||||
variables to cause Ant to use ECJ, but this doesn’t work with all Ant
|
||||
files. Similarly, you can use the GNU Java Compiler:
|
||||
|
||||
<programlisting>
|
||||
buildInputs = [ gcj ant ];
|
||||
</programlisting>
|
||||
|
||||
Here, Ant will automatically use <command>gij</command> (the GNU Java
|
||||
Runtime) instead of the OpenJRE.</para>
|
||||
|
||||
</section>
|
||||
|
||||
@@ -1,51 +0,0 @@
|
||||
<section xmlns="http://docbook.org/ns/docbook"
|
||||
xmlns:xlink="http://www.w3.org/1999/xlink"
|
||||
xml:id="sec-language-lua">
|
||||
|
||||
<title>Lua</title>
|
||||
|
||||
<para>
|
||||
Lua packages are built by the <varname>buildLuaPackage</varname> function. This function is
|
||||
implemented
|
||||
in <link xlink:href="https://github.com/NixOS/nixpkgs/blob/master/pkgs/development/lua-modules/generic/default.nix">
|
||||
<filename>pkgs/development/lua-modules/generic/default.nix</filename></link>
|
||||
and works similarly to <varname>buildPerlPackage</varname>. (See
|
||||
<xref linkend="sec-language-perl"/> for details.)
|
||||
</para>
|
||||
|
||||
<para>
|
||||
Lua packages are defined
|
||||
in <link xlink:href="https://github.com/NixOS/nixpkgs/blob/master/pkgs/top-level/lua-packages.nix"><filename>pkgs/top-level/lua-packages.nix</filename></link>.
|
||||
Most of them are simple. For example:
|
||||
|
||||
<programlisting>
|
||||
fileSystem = buildLuaPackage {
|
||||
name = "filesystem-1.6.2";
|
||||
src = fetchurl {
|
||||
url = "https://github.com/keplerproject/luafilesystem/archive/v1_6_2.tar.gz";
|
||||
sha256 = "1n8qdwa20ypbrny99vhkmx8q04zd2jjycdb5196xdhgvqzk10abz";
|
||||
};
|
||||
meta = {
|
||||
homepage = "https://github.com/keplerproject/luafilesystem";
|
||||
hydraPlatforms = stdenv.lib.platforms.linux;
|
||||
maintainers = with maintainers; [ flosse ];
|
||||
};
|
||||
};
|
||||
</programlisting>
|
||||
</para>
|
||||
|
||||
<para>
|
||||
More complicated packages, however, should be placed in a separate file in
|
||||
<link
|
||||
xlink:href="https://github.com/NixOS/nixpkgs/blob/master/pkgs/development/lua-modules"><filename>pkgs/development/lua-modules</filename></link>.
|
||||
</para>
|
||||
<para>
|
||||
Lua packages accept an additional parameter <varname>disabled</varname>, which defines
the condition under which a package is excluded from <varname>luaPackages</varname>. For example, if a package has
<varname>disabled</varname> set to <literal>lua.luaversion != "5.1"</literal>,
it will not be included in any <varname>luaPackages</varname> set except <varname>lua51Packages</varname>, so it
is only built for Lua 5.1.
|
||||
</para>
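<para>
A minimal sketch of such a package (name and source are hypothetical):
</para>
<programlisting>
foo = buildLuaPackage {
  name = "foo-1.0";
  src = fetchurl { ... };
  disabled = lua.luaversion != "5.1";
};
</programlisting>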
|
||||
|
||||
</section>
|
||||
|
||||
@@ -1,181 +0,0 @@
|
||||
<section xmlns="http://docbook.org/ns/docbook"
|
||||
xmlns:xlink="http://www.w3.org/1999/xlink"
|
||||
xml:id="sec-language-perl">
|
||||
|
||||
<title>Perl</title>
|
||||
|
||||
<para>Nixpkgs provides a function <varname>buildPerlPackage</varname>,
|
||||
a generic package builder function for any Perl package that has a
|
||||
standard <varname>Makefile.PL</varname>. It’s implemented in <link
|
||||
xlink:href="https://github.com/NixOS/nixpkgs/blob/master/pkgs/development/perl-modules/generic"><filename>pkgs/development/perl-modules/generic</filename></link>.</para>
|
||||
|
||||
<para>Perl packages from CPAN are defined in <link
|
||||
xlink:href="https://github.com/NixOS/nixpkgs/blob/master/pkgs/top-level/perl-packages.nix"><filename>pkgs/top-level/perl-packages.nix</filename></link>,
|
||||
rather than <filename>pkgs/all-packages.nix</filename>. Most Perl
|
||||
packages are so straight-forward to build that they are defined here
|
||||
directly, rather than having a separate function for each package
|
||||
called from <filename>perl-packages.nix</filename>. However, more
|
||||
complicated packages should be put in a separate file, typically in
|
||||
<filename>pkgs/development/perl-modules</filename>. Here is an
|
||||
example of the former:
|
||||
|
||||
<programlisting>
|
||||
ClassC3 = buildPerlPackage rec {
|
||||
name = "Class-C3-0.21";
|
||||
src = fetchurl {
|
||||
url = "mirror://cpan/authors/id/F/FL/FLORA/${name}.tar.gz";
|
||||
sha256 = "1bl8z095y4js66pwxnm7s853pi9czala4sqc743fdlnk27kq94gz";
|
||||
};
|
||||
};
|
||||
</programlisting>
|
||||
|
||||
Note the use of <literal>mirror://cpan/</literal>, and the
|
||||
<literal>${name}</literal> in the URL definition to ensure that the
|
||||
name attribute is consistent with the source that we’re actually
|
||||
downloading. Perl packages are made available in
|
||||
<filename>all-packages.nix</filename> through the variable
|
||||
<varname>perlPackages</varname>. For instance, if you have a package
|
||||
that needs <varname>ClassC3</varname>, you would typically write
|
||||
|
||||
<programlisting>
|
||||
foo = import ../path/to/foo.nix {
|
||||
inherit stdenv fetchurl ...;
|
||||
inherit (perlPackages) ClassC3;
|
||||
};
|
||||
</programlisting>
|
||||
|
||||
in <filename>all-packages.nix</filename>. You can test building a
|
||||
Perl package as follows:
|
||||
|
||||
<screen>
|
||||
$ nix-build -A perlPackages.ClassC3
|
||||
</screen>
|
||||
|
||||
<varname>buildPerlPackage</varname> adds <literal>perl-</literal> to
|
||||
the start of the name attribute, so the package above is actually
|
||||
called <literal>perl-Class-C3-0.21</literal>. So to install it, you
|
||||
can say:
|
||||
|
||||
<screen>
|
||||
$ nix-env -i perl-Class-C3
|
||||
</screen>
|
||||
|
||||
(Of course you can also install using the attribute name:
|
||||
<literal>nix-env -i -A perlPackages.ClassC3</literal>.)</para>
|
||||
|
||||
<para>So what does <varname>buildPerlPackage</varname> do? It does
|
||||
the following:
|
||||
|
||||
<orderedlist>
|
||||
|
||||
<listitem><para>In the configure phase, it calls <literal>perl
|
||||
Makefile.PL</literal> to generate a Makefile. You can set the
|
||||
variable <varname>makeMakerFlags</varname> to pass flags to
|
||||
<filename>Makefile.PL</filename>, as in the example after this list.</para></listitem>
|
||||
|
||||
<listitem><para>It adds the contents of the <envar>PERL5LIB</envar>
|
||||
environment variable to the <literal>#! .../bin/perl</literal> line of
|
||||
Perl scripts as <literal>-I<replaceable>dir</replaceable></literal>
|
||||
flags. This ensures that a script can find its
|
||||
dependencies.</para></listitem>
|
||||
|
||||
<listitem><para>In the fixup phase, it writes the propagated build
|
||||
inputs (<varname>propagatedBuildInputs</varname>) to the file
|
||||
<filename>$out/nix-support/propagated-user-env-packages</filename>.
|
||||
<command>nix-env</command> recursively installs all packages listed
|
||||
in this file when you install a package that has it. This ensures
|
||||
that a Perl package can find its dependencies.</para></listitem>
|
||||
|
||||
</orderedlist>
|
||||
|
||||
</para>
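<para>For example, a hypothetical package could pass an extra flag to
<filename>Makefile.PL</filename> like this (a sketch; the flag value is made up):

<programlisting>
FooBar = buildPerlPackage rec {
  name = "Foo-Bar-1.0";
  src = fetchurl { ... };
  makeMakerFlags = "FOO_DIR=/path/to/foo";
};
</programlisting>

</para>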
|
||||
|
||||
<para><varname>buildPerlPackage</varname> is built on top of
|
||||
<varname>stdenv</varname>, so everything can be customised in the
|
||||
usual way. For instance, the <literal>BerkeleyDB</literal> module has
|
||||
a <varname>preConfigure</varname> hook to generate a configuration
|
||||
file used by <filename>Makefile.PL</filename>:
|
||||
|
||||
<programlisting>
|
||||
{ buildPerlPackage, fetchurl, db }:
|
||||
|
||||
buildPerlPackage rec {
|
||||
name = "BerkeleyDB-0.36";
|
||||
|
||||
src = fetchurl {
|
||||
url = "mirror://cpan/authors/id/P/PM/PMQS/${name}.tar.gz";
|
||||
sha256 = "07xf50riarb60l1h6m2dqmql8q5dij619712fsgw7ach04d8g3z1";
|
||||
};
|
||||
|
||||
preConfigure = ''
|
||||
echo "LIB = ${db}/lib" > config.in
|
||||
echo "INCLUDE = ${db}/include" >> config.in
|
||||
'';
|
||||
}
|
||||
</programlisting>
|
||||
|
||||
</para>
|
||||
|
||||
<para>Dependencies on other Perl packages can be specified in the
|
||||
<varname>buildInputs</varname> and
|
||||
<varname>propagatedBuildInputs</varname> attributes. If something is
|
||||
exclusively a build-time dependency, use
|
||||
<varname>buildInputs</varname>; if it’s (also) a runtime dependency,
|
||||
use <varname>propagatedBuildInputs</varname>. For instance, this
|
||||
builds a Perl module that has runtime dependencies on a bunch of other
|
||||
modules:
|
||||
|
||||
<programlisting>
|
||||
ClassC3Componentised = buildPerlPackage rec {
|
||||
name = "Class-C3-Componentised-1.0004";
|
||||
src = fetchurl {
|
||||
url = "mirror://cpan/authors/id/A/AS/ASH/${name}.tar.gz";
|
||||
sha256 = "0xql73jkcdbq4q9m0b0rnca6nrlvf5hyzy8is0crdk65bynvs8q1";
|
||||
};
|
||||
propagatedBuildInputs = [
|
||||
ClassC3 ClassInspector TestException MROCompat
|
||||
];
|
||||
};
|
||||
</programlisting>
|
||||
|
||||
</para>
|
||||
|
||||
<section xml:id="ssec-generation-from-CPAN"><title>Generation from CPAN</title>
|
||||
|
||||
<para>Nix expressions for Perl packages can be generated (almost)
|
||||
automatically from CPAN. This is done by the program
|
||||
<command>nix-generate-from-cpan</command>, which can be installed
|
||||
as follows:</para>
|
||||
|
||||
<screen>
|
||||
$ nix-env -i nix-generate-from-cpan
|
||||
</screen>
|
||||
|
||||
<para>This program takes a Perl module name, looks it up on CPAN,
|
||||
fetches and unpacks the corresponding package, and prints a Nix
|
||||
expression on standard output. For example:
|
||||
|
||||
<screen>
|
||||
$ nix-generate-from-cpan XML::Simple
|
||||
XMLSimple = buildPerlPackage rec {
|
||||
name = "XML-Simple-2.22";
|
||||
src = fetchurl {
|
||||
url = "mirror://cpan/authors/id/G/GR/GRANTM/${name}.tar.gz";
|
||||
sha256 = "b9450ef22ea9644ae5d6ada086dc4300fa105be050a2030ebd4efd28c198eb49";
|
||||
};
|
||||
propagatedBuildInputs = [ XMLNamespaceSupport XMLSAX XMLSAXExpat ];
|
||||
meta = {
|
||||
description = "An API for simple XML files";
|
||||
license = with stdenv.lib.licenses; [ artistic1 gpl1Plus ];
|
||||
};
|
||||
};
|
||||
</screen>
|
||||
|
||||
The output can be pasted into
|
||||
<filename>pkgs/top-level/perl-packages.nix</filename> or wherever else
|
||||
you need it.</para>
|
||||
|
||||
</section>
|
||||
|
||||
</section>
|
||||
|
||||
@@ -1,817 +0,0 @@
|
||||
# Python
|
||||
|
||||
## User Guide
|
||||
|
||||
Several versions of Python are available on Nix, as well as a large number of
packages. The default interpreter is CPython 3.5.
|
||||
|
||||
### Using Python
|
||||
|
||||
#### Installing Python and packages
|
||||
|
||||
It is important to make a distinction between Python packages that are
|
||||
used as libraries, and applications that are written in Python.
|
||||
|
||||
Applications on Nix are installed typically into your user
|
||||
profile imperatively using `nix-env -i`, and on NixOS declaratively by adding the
|
||||
package name to `environment.systemPackages` in `/etc/nixos/configuration.nix`.
|
||||
Dependencies such as libraries are automatically installed and should not be
|
||||
installed explicitly.
|
||||
|
||||
The same distinction applies to Python applications and libraries. Python
applications can be installed in your profile, but Python libraries you would
like to develop against should not be. If you do install libraries in your
profile, you will end up with import errors.
|
||||
|
||||
#### Python environments using `nix-shell`
|
||||
|
||||
The recommended method for creating Python environments for development is with
|
||||
`nix-shell`. Executing
|
||||
|
||||
```sh
|
||||
$ nix-shell -p python35Packages.numpy python35Packages.toolz
|
||||
```
|
||||
|
||||
opens a Nix shell which has available the requested packages and dependencies.
|
||||
Now you can launch the Python interpreter (which is itself a dependency)
|
||||
|
||||
```sh
|
||||
[nix-shell:~] python3
|
||||
```
|
||||
|
||||
If the packages were not available yet in the Nix store, Nix would download or
|
||||
build them automatically. A convenient option with `nix-shell` is the `--run`
|
||||
option, with which you can execute a command in the `nix-shell`. Let's say we
|
||||
want the above environment and directly run the Python interpreter
|
||||
|
||||
```sh
|
||||
$ nix-shell -p python35Packages.numpy python35Packages.toolz --run "python3"
|
||||
```
|
||||
|
||||
This way you can use the `--run` option also to directly run a script
|
||||
|
||||
```sh
|
||||
$ nix-shell -p python35Packages.numpy python35Packages.toolz --run "python3 myscript.py"
|
||||
```
|
||||
|
||||
In fact, for this specific use case there is a more convenient method. You can
|
||||
add a [shebang](https://en.wikipedia.org/wiki/Shebang_(Unix)) to your script
|
||||
specifying which dependencies Nix shell needs. With the following shebang, you
|
||||
can use `nix-shell myscript.py` and it will make available all dependencies and
|
||||
run the script in the `python3` shell.
|
||||
|
||||
```py
|
||||
#! /usr/bin/env nix-shell
|
||||
#! nix-shell -i python3 -p python3Packages.numpy
|
||||
|
||||
import numpy
|
||||
|
||||
print(numpy.__version__)
|
||||
```
|
||||
|
||||
Likely you do not want to type your dependencies each and every time. What you
|
||||
can do is write a simple Nix expression which sets up an environment for you,
|
||||
requiring you only to type `nix-shell`. Say we want to have Python 3.5, `numpy`
|
||||
and `toolz`, like before, in an environment. With a `shell.nix` file
|
||||
containing
|
||||
|
||||
```nix
|
||||
with import <nixpkgs> {};
|
||||
|
||||
(pkgs.python35.withPackages (ps: [ps.numpy ps.toolz])).env
|
||||
```
|
||||
executing `nix-shell` gives you again a Nix shell from which you can run Python.
|
||||
|
||||
What's happening here?
|
||||
|
||||
1. We begin by importing the Nix Packages collection. `import <nixpkgs>` imports the `<nixpkgs>` function, `{}` calls it, and the `with` statement brings all attributes of `nixpkgs` into the local scope. Therefore we can now use `pkgs`.
|
||||
2. Then we create a Python 3.5 environment with the `withPackages` function.
|
||||
3. The `withPackages` function expects us to provide a function as an argument that takes the set of all python packages and returns a list of packages to include in the environment. Here, we select the packages `numpy` and `toolz` from the package set.
|
||||
4. And finally, for interactive use we return the environment by using the `env` attribute.
|
||||
|
||||
### Developing with Python
|
||||
|
||||
|
||||
Now that you know how to get a working Python environment on Nix, it is time to go forward and start actually developing with Python.
|
||||
We will first have a look at how Python packages are packaged on Nix. Then, we will look at how you can use development mode with your code.
|
||||
|
||||
#### Python packaging on Nix
|
||||
|
||||
On Nix all packages are built by functions. The main function in Nix for building Python packages is [`buildPythonPackage`](https://github.com/NixOS/nixpkgs/blob/master/pkgs/development/interpreters/python/build-python-package.nix).
|
||||
Let's see how we would build the `toolz` package. According to [`python-packages.nix`](https://raw.githubusercontent.com/NixOS/nixpkgs/master/pkgs/top-level/python-packages.nix), `toolz` is built using
|
||||
|
||||
```nix
|
||||
toolz = buildPythonPackage rec{
|
||||
name = "toolz-${version}";
|
||||
version = "0.7.4";
|
||||
|
||||
src = pkgs.fetchurl{
|
||||
url = "mirror://pypi/t/toolz/toolz-${version}.tar.gz";
|
||||
sha256 = "43c2c9e5e7a16b6c88ba3088a9bfc82f7db8e13378be7c78d6c14a5f8ed05afd";
|
||||
};
|
||||
|
||||
meta = {
|
||||
homepage = "http://github.com/pytoolz/toolz/";
|
||||
description = "List processing tools and functional utilities";
|
||||
license = licenses.bsd3;
|
||||
maintainers = with maintainers; [ fridh ];
|
||||
};
|
||||
};
|
||||
```
|
||||
|
||||
What happens here? The function `buildPythonPackage` is called with a set as
its argument. In this case the set is a recursive set ([`rec`](http://nixos.org/nix/manual/#sec-constructs)).
One of the arguments is the name of the package, which consists of a basename
(generally following the name on PyPI) and a version. Another argument, `src`,
specifies the source, which in this case is fetched from a URL. `fetchurl` not
only downloads the target file, but also validates its hash. Furthermore, we
specify some (optional) [meta information](http://nixos.org/nixpkgs/manual/#chap-meta).
|
||||
|
||||
The output of the function is a derivation, which is an attribute with the name
|
||||
`toolz` of the set `pythonPackages`. Actually, sets are created for all interpreter versions,
|
||||
so `python27Packages`, `python34Packages`, `python35Packages` and `pypyPackages`.
|
||||
|
||||
The above example works when you're directly working on
|
||||
`pkgs/top-level/python-packages.nix` in the Nixpkgs repository. Often though,
|
||||
you will want to test a Nix expression outside of the Nixpkgs tree. If you
|
||||
create a `shell.nix` file with the following contents
|
||||
|
||||
```nix
|
||||
with import <nixpkgs> {};
|
||||
|
||||
pkgs.python35Packages.buildPythonPackage rec {
|
||||
name = "toolz-${version}";
|
||||
version = "0.8.0";
|
||||
|
||||
src = pkgs.fetchurl{
|
||||
url = "mirror://pypi/t/toolz/toolz-${version}.tar.gz";
|
||||
sha256 = "e8451af61face57b7c5d09e71c0d27b8005f001ead56e9fdf470417e5cc6d479";
|
||||
};
|
||||
|
||||
doCheck = false;
|
||||
|
||||
meta = {
|
||||
homepage = "http://github.com/pytoolz/toolz/";
|
||||
description = "List processing tools and functional utilities";
|
||||
license = licenses.bsd3;
|
||||
maintainers = with maintainers; [ fridh ];
|
||||
};
|
||||
}
|
||||
```
|
||||
|
||||
and then execute `nix-shell`, you will get an environment in which you can use
Python 3.5 and the `toolz` package. As you can see, we had to explicitly state
for which Python version we want to build the package.
|
||||
|
||||
The above example considered only a single package. Generally you will want to use multiple packages.
|
||||
If we create a `shell.nix` file with the following contents
|
||||
|
||||
```nix
|
||||
with import <nixpkgs> {};
|
||||
|
||||
( let
|
||||
toolz = pkgs.python35Packages.buildPythonPackage rec {
|
||||
name = "toolz-${version}";
|
||||
version = "0.8.0";
|
||||
|
||||
src = pkgs.fetchurl{
|
||||
url = "mirror://pypi/t/toolz/toolz-${version}.tar.gz";
|
||||
sha256 = "e8451af61face57b7c5d09e71c0d27b8005f001ead56e9fdf470417e5cc6d479";
|
||||
};
|
||||
|
||||
doCheck = false;
|
||||
|
||||
meta = {
|
||||
homepage = "http://github.com/pytoolz/toolz/";
|
||||
description = "List processing tools and functional utilities";
|
||||
};
|
||||
};
|
||||
|
||||
in pkgs.python35.withPackages (ps: [ps.numpy toolz])
|
||||
).env
|
||||
```
|
||||
|
||||
and again execute `nix-shell`, then we get a Python 3.5 environment with our
|
||||
locally defined package as well as `numpy`, which is built according to the
|
||||
definition in Nixpkgs. What did we do here? Well, we took the Nix expression
|
||||
that we used earlier to build a Python environment, and said that we wanted to
|
||||
include our own version of `toolz`. To introduce our own package in the scope of
|
||||
`withPackages` we used a
|
||||
[`let`](http://nixos.org/nix/manual/#sec-constructs) expression.
|
||||
You can see that we used `ps.numpy` to select numpy from the nixpkgs package set (`ps`).
|
||||
But we do not take `toolz` from the nixpkgs package set this time.
|
||||
Instead, `toolz` will resolve to our local definition that we introduced with `let`.
|
||||
|
||||
### Handling dependencies
|
||||
|
||||
Our example, `toolz`, doesn't have any dependencies on other Python
|
||||
packages or system libraries. According to the manual, `buildPythonPackage`
|
||||
uses the arguments `buildInputs` and `propagatedBuildInputs` to specify dependencies. If something is
|
||||
exclusively a build-time dependency, then the dependency should be included as a
|
||||
`buildInput`, but if it is (also) a runtime dependency, then it should be added
|
||||
to `propagatedBuildInputs`. Test dependencies are considered build-time dependencies.
|
||||
|
||||
The following example shows which arguments are given to `buildPythonPackage` in
|
||||
order to build [`datashape`](https://github.com/blaze/datashape).
|
||||
|
||||
```nix
|
||||
datashape = buildPythonPackage rec {
|
||||
name = "datashape-${version}";
|
||||
version = "0.4.7";
|
||||
|
||||
src = pkgs.fetchurl {
|
||||
url = "mirror://pypi/D/DataShape/${name}.tar.gz";
|
||||
sha256 = "14b2ef766d4c9652ab813182e866f493475e65e558bed0822e38bf07bba1a278";
|
||||
};
|
||||
|
||||
buildInputs = with self; [ pytest ];
|
||||
propagatedBuildInputs = with self; [ numpy multipledispatch dateutil ];
|
||||
|
||||
meta = {
|
||||
homepage = https://github.com/ContinuumIO/datashape;
|
||||
description = "A data description language";
|
||||
license = licenses.bsd2;
|
||||
maintainers = with maintainers; [ fridh ];
|
||||
};
|
||||
};
|
||||
```
|
||||
|
||||
We can see several runtime dependencies, `numpy`, `multipledispatch`, and
|
||||
`dateutil`. Furthermore, we have one `buildInput`, i.e. `pytest`. `pytest` is a
|
||||
test runner and is only used during the `checkPhase` and is therefore not added
|
||||
to `propagatedBuildInputs`.
|
||||
|
||||
In the previous case we only had dependencies on other Python packages to consider.
Occasionally you also have to consider system libraries. E.g., `lxml` provides
Python bindings to `libxml2` and `libxslt`. These libraries are only required
when building the bindings and are therefore added as `buildInputs`.
|
||||
|
||||
```nix
|
||||
lxml = buildPythonPackage rec {
|
||||
name = "lxml-3.4.4";
|
||||
|
||||
src = pkgs.fetchurl {
|
||||
url = "mirror://pypi/l/lxml/${name}.tar.gz";
|
||||
sha256 = "16a0fa97hym9ysdk3rmqz32xdjqmy4w34ld3rm3jf5viqjx65lxk";
|
||||
};
|
||||
|
||||
buildInputs = with self; [ pkgs.libxml2 pkgs.libxslt ];
|
||||
|
||||
meta = {
|
||||
description = "Pythonic binding for the libxml2 and libxslt libraries";
|
||||
homepage = http://lxml.de;
|
||||
license = licenses.bsd3;
|
||||
maintainers = with maintainers; [ sjourdois ];
|
||||
};
|
||||
};
|
||||
```
|
||||
|
||||
In this example `lxml` and Nix are able to work out exactly where the relevant
|
||||
files of the dependencies are. This is not always the case.
|
||||
|
||||
The example below shows bindings to The Fastest Fourier Transform in the West, commonly known as
|
||||
FFTW. On Nix we have separate packages of FFTW for the different types of floats
|
||||
(`"single"`, `"double"`, `"long-double"`). The bindings need all three types,
|
||||
and therefore we add all three as `buildInputs`. The bindings don't expect to
|
||||
find each of them in a different folder, and therefore we have to set `LDFLAGS`
|
||||
and `CFLAGS`.
|
||||
|
||||
```nix
|
||||
pyfftw = buildPythonPackage rec {
|
||||
name = "pyfftw-${version}";
|
||||
version = "0.9.2";
|
||||
|
||||
src = pkgs.fetchurl {
|
||||
url = "mirror://pypi/p/pyFFTW/pyFFTW-${version}.tar.gz";
|
||||
sha256 = "f6bbb6afa93085409ab24885a1a3cdb8909f095a142f4d49e346f2bd1b789074";
|
||||
};
|
||||
|
||||
buildInputs = [ pkgs.fftw pkgs.fftwFloat pkgs.fftwLongDouble];
|
||||
|
||||
propagatedBuildInputs = with self; [ numpy scipy ];
|
||||
|
||||
# Tests cannot import pyfftw. pyfftw works fine though.
|
||||
doCheck = false;
|
||||
|
||||
preConfigure = ''
  export LDFLAGS="-L${pkgs.fftw.dev}/lib -L${pkgs.fftwFloat.out}/lib -L${pkgs.fftwLongDouble.out}/lib"
  export CFLAGS="-I${pkgs.fftw.dev}/include -I${pkgs.fftwFloat.dev}/include -I${pkgs.fftwLongDouble.dev}/include"
'';
|
||||
|
||||
meta = {
|
||||
description = "A pythonic wrapper around FFTW, the FFT library, presenting a unified interface for all the supported transforms";
|
||||
homepage = http://hgomersall.github.com/pyFFTW/;
|
||||
license = with licenses; [ bsd2 bsd3 ];
|
||||
maintainers = with maintainers; [ fridh ];
|
||||
};
|
||||
};
|
||||
```
|
||||
Note also the line `doCheck = false;`: we explicitly disabled running the test suite.
|
||||
|
||||
|
||||
#### Develop local package
|
||||
|
||||
As a Python developer you're likely aware of [development mode](http://setuptools.readthedocs.io/en/latest/setuptools.html#development-mode) (`python setup.py develop`);
instead of installing the package, this command creates a special link to the project code.
That way, you can run updated code without having to reinstall after each and every change you make.
Development mode is also available on Nix. Let's see how you can use it.
|
||||
|
||||
In the previous Nix expression the source was fetched from a URL. We can also refer to a local source instead using
|
||||
|
||||
```nix
|
||||
src = ./path/to/source/tree;
|
||||
```
|
||||
|
||||
If we create a `shell.nix` file which calls `buildPythonPackage`, and if `src`
|
||||
is a local source, and if the local source has a `setup.py`, then development
|
||||
mode is activated.
|
||||
|
||||
In the following example we create a simple environment that
|
||||
has a Python 3.5 version of our package in it, as well as its dependencies and
|
||||
other packages we like to have in the environment, all specified with `propagatedBuildInputs`.
|
||||
Indeed, we can just add any package we like to have in our environment to `propagatedBuildInputs`.
|
||||
|
||||
```nix
|
||||
with import <nixpkgs> {};
|
||||
with pkgs.python35Packages;
|
||||
|
||||
buildPythonPackage rec {
|
||||
name = "mypackage";
|
||||
src = ./path/to/package/source;
|
||||
propagatedBuildInputs = [ pytest numpy pkgs.libsndfile ];
|
||||
}
|
||||
```
|
||||
|
||||
It is important to note that due to how development mode is implemented on Nix it is not possible to have multiple packages simultaneously in development mode.
|
||||
|
||||
|
||||
### Organising your packages
|
||||
|
||||
So far we discussed how you can use Python on Nix, and how you can develop with
|
||||
it. We've looked at how you write expressions to package Python packages, and we
|
||||
looked at how you can create environments in which specified packages are
|
||||
available.
|
||||
|
||||
At some point you'll likely have multiple packages which you would
like to be able to use in different projects. In order to minimise unnecessary
duplication we now look at how you can maintain a repository with your
own packages. The important functions here are `import` and `callPackage`.
|
||||
|
||||
### Including a derivation using `callPackage`
|
||||
|
||||
Earlier we created a Python environment using `withPackages`, and included the
|
||||
`toolz` package via a `let` expression.
|
||||
Let's split the package definition from the environment definition.
|
||||
|
||||
We first create a function that builds `toolz` in `~/path/to/toolz/release.nix`
|
||||
|
||||
```nix
|
||||
{ pkgs, buildPythonPackage }:
|
||||
|
||||
buildPythonPackage rec {
|
||||
name = "toolz-${version}";
|
||||
version = "0.7.4";
|
||||
|
||||
src = pkgs.fetchurl{
|
||||
url = "mirror://pypi/t/toolz/toolz-${version}.tar.gz";
|
||||
sha256 = "43c2c9e5e7a16b6c88ba3088a9bfc82f7db8e13378be7c78d6c14a5f8ed05afd";
|
||||
};
|
||||
|
||||
meta = {
|
||||
homepage = "http://github.com/pytoolz/toolz/";
|
||||
description = "List processing tools and functional utilities";
|
||||
license = licenses.bsd3;
|
||||
maintainers = with maintainers; [ fridh ];
|
||||
};
|
||||
};
|
||||
```
|
||||
|
||||
It takes two arguments, `pkgs` and `buildPythonPackage`.
|
||||
We now call this function using `callPackage` in the definition of our environment
|
||||
|
||||
```nix
|
||||
with import <nixpkgs> {};
|
||||
|
||||
( let
|
||||
toolz = pkgs.callPackage ~/path/to/toolz/release.nix { pkgs=pkgs; buildPythonPackage=pkgs.python35Packages.buildPythonPackage; };
|
||||
in pkgs.python35.withPackages (ps: [ ps.numpy toolz ])
|
||||
).env
|
||||
```
|
||||
|
||||
Important to remember is that the Python version for which the package is made
|
||||
depends on the `python` derivation that is passed to `buildPythonPackage`. Nix
|
||||
tries to automatically pass arguments when possible, which is why generally you
|
||||
don't explicitly define which `python` derivation should be used. In the above
|
||||
example we use `buildPythonPackage` that is part of the set `python35Packages`,
|
||||
and in this case the `python35` interpreter is automatically used.
|
||||
|
||||
|
||||
|
||||
## Reference
|
||||
|
||||
### Interpreters
|
||||
|
||||
Versions 2.6, 2.7, 3.3, 3.4 and 3.5 of the CPython interpreter are available as respectively
|
||||
`python26`, `python27`, `python33`, `python34` and `python35`. The PyPy interpreter
|
||||
is available as `pypy`. The aliases `python2` and `python3` correspond to respectively `python27` and
|
||||
`python35`. The default interpreter, `python`, maps to `python2`.
|
||||
The Nix expressions for the interpreters can be found in
|
||||
`pkgs/development/interpreters/python`.
|
||||
|
||||
All packages depending on any Python interpreter get `$out/${python.sitePackages}`
appended to `$PYTHONPATH` if such a directory exists.
|
||||
|
||||
#### Missing `tkinter` module standard library
|
||||
|
||||
To reduce closure size, the `Tkinter`/`tkinter` module is available as a separate package, `pythonPackages.tkinter`.
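For example, a minimal sketch of an environment that includes it:

```nix
with import <nixpkgs> {};

(python35.withPackages (ps: [ ps.tkinter ])).env
```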
|
||||
|
||||
#### Attributes on interpreter packages
|
||||
|
||||
Each interpreter has the following attributes:
|
||||
|
||||
- `libPrefix`. Name of the folder in `${python}/lib/` for corresponding interpreter.
|
||||
- `interpreter`. Alias for `${python}/bin/${executable}`.
|
||||
- `buildEnv`. Function to build python interpreter environments with extra packages bundled together. See section *python.buildEnv function* for usage and documentation.
|
||||
- `withPackages`. Simpler interface to `buildEnv`. See section *python.withPackages function* for usage and documentation.
|
||||
- `sitePackages`. Alias for `lib/${libPrefix}/site-packages`.
|
||||
- `executable`. Name of the interpreter executable, e.g. `python3.4`.
|
||||
- `pkgs`. Set of Python packages for that specific interpreter. The package set can be modified by overriding the interpreter and passing `packageOverrides`.
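As a quick illustration, the following expression evaluates a few of these attributes (e.g. with `nix-instantiate --eval --strict`); the values shown in the comments are indicative:

```nix
with import <nixpkgs> {};

{
  prefix = python35.libPrefix;       # "python3.5"
  site   = python35.sitePackages;    # "lib/python3.5/site-packages"
  exe    = python35.executable;      # "python3.5"
}
```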
|
||||
|
||||
### Building packages and applications
|
||||
|
||||
Python libraries and applications that use `setuptools` or
`distutils` are typically built with the `buildPythonPackage` and
`buildPythonApplication` functions, respectively. These two functions also support installing a `wheel`.
|
||||
|
||||
All Python packages reside in `pkgs/top-level/python-packages.nix` and all
|
||||
applications elsewhere. In case a package is used as both a library and an application,
|
||||
then the package should be in `pkgs/top-level/python-packages.nix` since only those packages are made
|
||||
available for all interpreter versions. The preferred location for library expressions is in
|
||||
`pkgs/development/python-modules`. It is important that these packages are
|
||||
called from `pkgs/top-level/python-packages.nix` and not elsewhere, to guarantee
|
||||
the right version of the package is built.
|
||||
|
||||
Based on the packages defined in `pkgs/top-level/python-packages.nix` an
|
||||
attribute set is created for each available Python interpreter. The available
|
||||
sets are
|
||||
|
||||
* `pkgs.python26Packages`
|
||||
* `pkgs.python27Packages`
|
||||
* `pkgs.python33Packages`
|
||||
* `pkgs.python34Packages`
|
||||
* `pkgs.python35Packages`
|
||||
* `pkgs.pypyPackages`
|
||||
|
||||
and the aliases
|
||||
|
||||
* `pkgs.python2Packages` pointing to `pkgs.python27Packages`
|
||||
* `pkgs.python3Packages` pointing to `pkgs.python35Packages`
|
||||
* `pkgs.pythonPackages` pointing to `pkgs.python2Packages`
|
||||
|
||||
#### `buildPythonPackage` function
|
||||
|
||||
The `buildPythonPackage` function is implemented in
|
||||
`pkgs/development/interpreters/python/build-python-package.nix`.
|
||||
|
||||
The following is an example:
|
||||
|
||||
    twisted = buildPythonPackage {
      name = "twisted-8.1.0";

      src = pkgs.fetchurl {
        url = http://tmrc.mit.edu/mirror/twisted/Twisted/8.1/Twisted-8.1.0.tar.bz2;
        sha256 = "0q25zbr4xzknaghha72mq57kh53qw1bf8csgp63pm9sfi72qhirl";
      };

      propagatedBuildInputs = [ self.ZopeInterface ];

      meta = {
        homepage = http://twistedmatrix.com/;
        description = "Twisted, an event-driven networking engine written in Python";
        license = stdenv.lib.licenses.mit;
      };
    };
|
||||
|
||||
The `buildPythonPackage` function mainly does four things:
|
||||
|
||||
* In the `buildPhase`, it calls `${python.interpreter} setup.py bdist_wheel` to
|
||||
build a wheel binary zipfile.
|
||||
* In the `installPhase`, it installs the wheel file using `pip install *.whl`.
|
||||
* In the `postFixup` phase, the `wrapPythonPrograms` bash function is called to
  wrap all programs in the `$out/bin/*` directory to include the `$PATH`
  environment variable and add dependent libraries to the script's `sys.path`.
* In the `installCheck` phase, `${python.interpreter} setup.py test` is run.
|
||||
|
||||
As in Perl, dependencies on other Python packages can be specified in the
|
||||
`buildInputs` and `propagatedBuildInputs` attributes. If something is
|
||||
exclusively a build-time dependency, use `buildInputs`; if it’s (also) a runtime
|
||||
dependency, use `propagatedBuildInputs`.
|
||||
|
||||
By default tests are run because `doCheck = true`. Test dependencies, such as
the test runner, should be added to `buildInputs`.
|
||||
|
||||
By default `meta.platforms` is set to the same value
|
||||
as the interpreter unless overridden.
|
||||
|
||||
##### `buildPythonPackage` parameters
|
||||
|
||||
All parameters from `mkDerivation` function are still supported.
|
||||
|
||||
* `namePrefix`: Prepended text to `${name}` parameter. Defaults to `"python3.3-"` for Python 3.3, etc. Set it to `""` if you're packaging an application or a command line tool.
|
||||
* `disabled`: If `true`, the package is not built for that particular Python interpreter version. Grep around `pkgs/top-level/python-packages.nix` for examples.
|
||||
* `setupPyBuildFlags`: List of flags passed to `setup.py build_ext` command.
|
||||
* `pythonPath`: List of packages to be added into `$PYTHONPATH`. Packages in `pythonPath` are not propagated (contrary to `propagatedBuildInputs`).
|
||||
* `preShellHook`: Hook to execute commands before `shellHook`.
|
||||
* `postShellHook`: Hook to execute commands after `shellHook`.
|
||||
* `makeWrapperArgs`: A list of strings. Arguments to be passed to `makeWrapper`, which wraps generated binaries. By default, the arguments to `makeWrapper` set `PATH` and `PYTHONPATH` environment variables before calling the binary. Additional arguments here can allow a developer to set environment variables which will be available when the binary is run. For example, `makeWrapperArgs = ["--set FOO BAR" "--set BAZ QUX"]`.
|
||||
* `installFlags`: A list of strings. Arguments to be passed to `pip install`. To pass options to `python setup.py install`, use `--install-option`. E.g., `installFlags=["--install-option='--cpp_implementation'"]`.
|
||||
* `format`: Format of the source. Valid options are `setuptools` (default), `flit`, `wheel`, and `other`. `setuptools` is for when the source has a `setup.py` and `setuptools` is used to build a wheel, `flit`, in case `flit` should be used to build a wheel, and `wheel` in case a wheel is provided. In case you need to provide your own `buildPhase` and `installPhase` you can use `other`.
|
||||
* `catchConflicts`: If `true`, abort the package build if a package name appears more than once in the dependency tree. Default is `true`.
|
||||
* `checkInputs`: Dependencies needed for running the `checkPhase`. These are added to `buildInputs` when `doCheck = true`.
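As a minimal sketch, a few of these parameters might be combined like this inside `python-packages.nix` (all names, the version, and the reasons given are hypothetical; `isPy26` is assumed to be in scope as it is in that file):

```nix
# Hypothetical entry in pkgs/top-level/python-packages.nix
myexample = buildPythonPackage rec {
  name = "myexample-1.0";
  src = ./path/to/myexample;             # local source with a setup.py
  disabled = isPy26;                      # assume Python 2.6 is not supported
  setupPyBuildFlags = [ "--inplace" ];    # extra flags for `setup.py build_ext`
  makeWrapperArgs = [ "--set FOO BAR" ];  # extra environment for wrapped binaries
  doCheck = false;                        # hypothetical: tests need network access
};
```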
|
||||
|
||||
#### `buildPythonApplication` function
|
||||
|
||||
The `buildPythonApplication` function is practically the same as `buildPythonPackage`.
|
||||
The difference is that `buildPythonPackage` by default prefixes the names of the packages with the version of the interpreter.
|
||||
Because with an application we're not interested in multiple versions, the prefix is dropped.
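A minimal sketch of a standalone application expression (the project is hypothetical); because `buildPythonApplication` is used, the resulting name is `myapp-1.0` rather than `python3.5-myapp-1.0`:

```nix
{ pkgs ? import <nixpkgs> {} }:

pkgs.python35Packages.buildPythonApplication rec {
  name = "myapp-1.0";
  src = ./path/to/myapp;   # local source with a setup.py
  propagatedBuildInputs = with pkgs.python35Packages; [ requests2 ];
}
```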
|
||||
|
||||
#### python.buildEnv function
|
||||
|
||||
Python environments can be created using the low-level `pkgs.buildEnv` function.
|
||||
This example shows how to create an environment that has the Pyramid Web Framework.
|
||||
Saving the following as `default.nix`
|
||||
|
||||
    with import <nixpkgs> {};

    python.buildEnv.override {
      extraLibs = [ pkgs.pythonPackages.pyramid ];
      ignoreCollisions = true;
    }
|
||||
|
||||
and running `nix-build` will create
|
||||
|
||||
    /nix/store/cf1xhjwzmdki7fasgr4kz6di72ykicl5-python-2.7.8-env
|
||||
|
||||
with wrapped binaries in `bin/`.
|
||||
|
||||
You can also use the `env` attribute to create local environments with needed
|
||||
packages installed. This is somewhat comparable to `virtualenv`. For example,
|
||||
running `nix-shell` with the following `shell.nix`
|
||||
|
||||
    with import <nixpkgs> {};

    (python3.buildEnv.override {
      extraLibs = with python3Packages; [ numpy requests2 ];
    }).env
|
||||
|
||||
will drop you into a shell where Python will have the
|
||||
specified packages in its path.
|
||||
|
||||
|
||||
##### `python.buildEnv` arguments
|
||||
|
||||
* `extraLibs`: List of packages installed inside the environment.
|
||||
* `postBuild`: Shell command executed after the build of environment.
|
||||
* `ignoreCollisions`: Ignore file collisions inside the environment (default is `false`).
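For instance, a variant of the earlier Pyramid example that also uses `postBuild` might look like this (the command shown is only illustrative):

```nix
with import <nixpkgs> {};

python.buildEnv.override {
  extraLibs = [ pkgs.pythonPackages.pyramid ];
  ignoreCollisions = true;
  # Illustrative only: runs inside the builder after the environment is composed
  postBuild = ''
    echo "created Python environment at $out"
  '';
}
```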
|
||||
|
||||
#### python.withPackages function
|
||||
|
||||
The `python.withPackages` function provides a simpler interface to the `python.buildEnv` functionality.
|
||||
It takes a function as an argument that is passed the set of python packages and returns the list
|
||||
of the packages to be included in the environment. Using the `withPackages` function, the previous
|
||||
example for the Pyramid Web Framework environment can be written like this:
|
||||
|
||||
    with import <nixpkgs> {};

    python.withPackages (ps: [ps.pyramid])
|
||||
|
||||
`withPackages` passes the correct package set for the specific interpreter version as an
|
||||
argument to the function. In the above example, `ps` equals `pythonPackages`.
|
||||
But you can also easily switch to using python3:
|
||||
|
||||
    with import <nixpkgs> {};

    python3.withPackages (ps: [ps.pyramid])
|
||||
|
||||
Now, `ps` is set to `python3Packages`, matching the version of the interpreter.
|
||||
|
||||
As `python.withPackages` simply uses `python.buildEnv` under the hood, it also supports the `env`
|
||||
attribute. The `shell.nix` file from the previous section can thus be also written like this:
|
||||
|
||||
    with import <nixpkgs> {};

    (python33.withPackages (ps: [ps.numpy ps.requests2])).env
|
||||
|
||||
In contrast to `python.buildEnv`, `python.withPackages` does not support the more advanced options
|
||||
such as `ignoreCollisions = true` or `postBuild`. If you need them, you have to use `python.buildEnv`.
|
||||
|
||||
### Development mode
|
||||
|
||||
Development or editable mode is supported. To develop Python packages
|
||||
`buildPythonPackage` has additional logic inside `shellPhase` to run `pip
install -e . --prefix $TMPDIR/` for the package.
|
||||
|
||||
Warning: `shellPhase` is executed only if `setup.py` exists.
|
||||
|
||||
Given a `default.nix`:
|
||||
|
||||
    with import <nixpkgs> {};

    buildPythonPackage {
      name = "myproject";

      buildInputs = with pkgs.pythonPackages; [ pyramid ];

      src = ./.;
    }
|
||||
|
||||
Running `nix-shell` with no arguments should give you
|
||||
the environment in which the package would be built with
|
||||
`nix-build`.
|
||||
|
||||
Shortcut to set up environments with C headers/libraries and Python packages:
|
||||
|
||||
    $ nix-shell -p pythonPackages.pyramid zlib libjpeg git
|
||||
|
||||
Note: There is a boolean value `lib.inNixShell` set to `true` if nix-shell is invoked.
|
||||
|
||||
### Tools
|
||||
|
||||
Packages inside nixpkgs are written by hand. However, many tools exist in the
community to help save time. No tool is preferred at the moment.
|
||||
|
||||
- [python2nix](https://github.com/proger/python2nix) by Vladimir Kirillov
|
||||
- [pypi2nix](https://github.com/garbas/pypi2nix) by Rok Garbas
|
||||
- [pypi2nix](https://github.com/offlinehacker/pypi2nix) by Jaka Hudoklin
|
||||
|
||||
## FAQ
|
||||
|
||||
### How can I install a working Python environment?
|
||||
|
||||
As explained in the user's guide, installing individual Python packages
|
||||
imperatively with `nix-env -i` or declaratively in `environment.systemPackages`
|
||||
is not supported. However, it is possible to install a Python environment with packages (`python.buildEnv`).
|
||||
|
||||
In the following examples we create an environment with Python 3.5, `numpy` and `ipython`.
|
||||
As you might imagine there is one limitation here, which is that you can install
only one such environment at a time. You will notice complaints about collisions
|
||||
when you try to install a second environment.
|
||||
|
||||
#### Environment defined in separate `.nix` file
|
||||
|
||||
Create a file, e.g. `build.nix`, with the following expression
|
||||
```nix
|
||||
with import <nixpkgs> {};
|
||||
|
||||
pkgs.python35.withPackages (ps: with ps; [ numpy ipython ])
|
||||
```
|
||||
and install it in your profile with
|
||||
```
|
||||
nix-env -if build.nix
|
||||
```
|
||||
Now you can use the Python interpreter, as well as the extra packages that you added to the environment.
|
||||
|
||||
#### Environment defined in `~/.nixpkgs/config.nix`
|
||||
|
||||
If you prefer to, you could also add the environment as a package override to the Nixpkgs set.
|
||||
```
|
||||
packageOverrides = pkgs: with pkgs; {
|
||||
myEnv = python35.withPackages (ps: with ps; [ numpy ipython ]);
|
||||
};
|
||||
```
|
||||
and install it in your profile with
|
||||
```
|
||||
nix-env -iA nixpkgs.myEnv
|
||||
```
|
||||
We're installing using the attribute path and assume the channel is named `nixpkgs`.
|
||||
|
||||
#### Environment defined in `/etc/nixos/configuration.nix`
|
||||
|
||||
For the sake of completeness, here's another example of how to install the environment system-wide.
|
||||
|
||||
```nix
|
||||
environment.systemPackages = with pkgs; [
|
||||
(python35.withPackages(ps: with ps; [ numpy ipython ]))
|
||||
];
|
||||
```
|
||||
|
||||
### How to solve circular dependencies?
|
||||
|
||||
Consider the packages `A` and `B` that depend on each other. When packaging `B`,
|
||||
a solution is to override package `A` not to depend on `B` as an input. The same
|
||||
should also be done when packaging `A`.
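A minimal sketch of this idea, assuming `A` and `B` are hypothetical attributes in the Python package set and that each expression takes the other package as an overridable function argument:

```nix
# Sketch only: B is built against a variant of A that no longer depends on B,
# which breaks the cycle.
packageOverrides = self: super: {
  B = super.B.override {
    A = super.A.override { B = null; };
  };
};
```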
|
||||
|
||||
### How to override a Python package?
|
||||
|
||||
We can override the interpreter and pass `packageOverrides`.
|
||||
In the following example we rename the `pandas` package and build it.
|
||||
```nix
|
||||
with import <nixpkgs> {};
|
||||
|
||||
let
|
||||
python = let
|
||||
packageOverrides = self: super: {
|
||||
pandas = super.pandas.override {name="foo";};
|
||||
};
|
||||
in pkgs.python35.override {inherit packageOverrides;};
|
||||
|
||||
in python.pkgs.pandas
|
||||
```
|
||||
Using `nix-build` on this expression will build the package `pandas`
|
||||
but with the new name `foo`.
|
||||
|
||||
All packages in the package set will use the renamed package.
|
||||
A typical use case is to switch to another version of a certain package.
|
||||
For example, in the Nixpkgs repository we have multiple versions of `django` and `scipy`.
|
||||
In the following example we use a different version of `scipy` and create an environment that uses it.
|
||||
All packages in the Python package set will now use the updated `scipy` version.
|
||||
|
||||
```nix
|
||||
with import <nixpkgs> {};
|
||||
|
||||
(
|
||||
let
|
||||
packageOverrides = self: super: {
|
||||
scipy = super.scipy_0_17;
|
||||
};
|
||||
in (pkgs.python35.override {inherit packageOverrides;}).withPackages (ps: [ps.blaze])
|
||||
).env
|
||||
```
|
||||
The requested package `blaze` depends on `pandas` which itself depends on `scipy`.
|
||||
|
||||
If you want the whole of Nixpkgs to use your modifications, then you can use `pkgs.overridePackages`
|
||||
as explained in this manual. In the following example we build `inkscape` using a different version of `numpy`.
|
||||
```nix
|
||||
let
|
||||
pkgs = import <nixpkgs> {};
|
||||
newpkgs = pkgs.overridePackages ( pkgsself: pkgssuper: {
|
||||
python27 = let
|
||||
packageOverrides = self: super: {
|
||||
numpy = super.numpy_1_10;
|
||||
};
|
||||
in pkgssuper.python27.override {inherit packageOverrides;};
|
||||
} );
|
||||
in newpkgs.inkscape
|
||||
```
|
||||
|
||||
### `python setup.py bdist_wheel` cannot create .whl
|
||||
|
||||
Executing `python setup.py bdist_wheel` in a `nix-shell` fails with
|
||||
```
|
||||
ValueError: ZIP does not support timestamps before 1980
|
||||
```
|
||||
This is because files are included that depend on items in the Nix store, which have a timestamp corresponding to January 1st, 1970 at 00:00:00. And, as the error informs you, ZIP does not support that.
|
||||
The command `bdist_wheel` takes into account `SOURCE_DATE_EPOCH`, and `nix-shell` sets this to 1. By setting it to a value corresponding to 1980 or later, or by unsetting it, it is possible to build wheels.
|
||||
|
||||
Use 1980 as timestamp:
|
||||
```
|
||||
nix-shell --run "SOURCE_DATE_EPOCH=315532800 python3 setup.py bdist_wheel"
|
||||
```
|
||||
or the current time:
|
||||
```
|
||||
nix-shell --run "SOURCE_DATE_EPOCH=$(date +%s) python3 setup.py bdist_wheel"
|
||||
```
|
||||
or unset:
|
||||
```
|
||||
nix-shell --run "unset SOURCE_DATE_EPOCH; python3 setup.py bdist_wheel"
|
||||
```
|
||||
|
||||
### `install_data` / `data_files` problems
|
||||
|
||||
If you get the following error:
|
||||
|
||||
    could not create '/nix/store/6l1bvljpy8gazlsw2aw9skwwp4pmvyxw-python-2.7.8/etc':
    Permission denied
|
||||
|
||||
This is a [known bug](https://bitbucket.org/pypa/setuptools/issue/130/install_data-doesnt-respect-prefix) in setuptools.
|
||||
Setuptools `install_data` does not respect `--prefix`. An example of such a package using the feature is `pkgs/tools/X11/xpra/default.nix`.
As a workaround, install it as an extra `preInstall` step:

    ${python.interpreter} setup.py install_data --install-dir=$out --root=$out
    sed -i '/ = data\_files/d' setup.py
|
||||
|
||||
|
||||
### Rationale of non-existent global site-packages
|
||||
|
||||
On most operating systems a global `site-packages` is maintained. This however
|
||||
becomes problematic if you want to run multiple Python versions or have multiple
|
||||
versions of certain libraries for your projects. Generally, you would solve such
|
||||
issues by creating virtual environments using `virtualenv`.
|
||||
|
||||
On Nix each package has an isolated dependency tree which, in the case of
|
||||
Python, guarantees the right versions of the interpreter and libraries or
|
||||
packages are available. There is therefore no need to maintain a global `site-packages`.
|
||||
|
||||
If you want to create a Python environment for development, then the recommended
|
||||
method is to use `nix-shell`, either with or without the `python.buildEnv`
|
||||
function.
|
||||
|
||||
|
||||
## Contributing
|
||||
|
||||
### Contributing guidelines
|
||||
|
||||
The following rules should be respected:
|
||||
|
||||
* Make sure the package builds for all Python interpreters. Use the `disabled` argument to `buildPythonPackage` to disable unsupported interpreters.
|
||||
* If tests need to be disabled for a package, make sure you leave a comment explaining the reasoning.
|
||||
* Packages in `pkgs/top-level/python-packages.nix` are sorted quasi-alphabetically to avoid merge conflicts.
|
||||
* Python libraries are supposed to be in `python-packages.nix` and packaged with `buildPythonPackage`. Python applications live outside of `python-packages.nix` and are packaged with `buildPythonApplication`.
|
||||
@@ -1,70 +0,0 @@
|
||||
<section xmlns="http://docbook.org/ns/docbook"
|
||||
xmlns:xlink="http://www.w3.org/1999/xlink"
|
||||
xml:id="sec-language-qt">
|
||||
|
||||
<title>Qt</title>
|
||||
|
||||
<para>The information in this section applies to Qt 5.5 and later.</para>
|
||||
|
||||
<para>Qt is an application development toolkit for C++. Although it is
|
||||
not a distinct programming language, there are special considerations
|
||||
for packaging Qt-based programs and libraries. A small set of tools
|
||||
and conventions has grown out of these considerations.</para>
|
||||
|
||||
<section xml:id="ssec-qt-libraries"><title>Libraries</title>
|
||||
|
||||
<para>Packages that provide libraries should be listed in
|
||||
<varname>qt5LibsFun</varname> so that the library is built with each
|
||||
Qt version. A set of packages is provided for each version of Qt; for
|
||||
example, <varname>qt5Libs</varname> always provides libraries built
|
||||
with the latest version, <varname>qt55Libs</varname> provides
|
||||
libraries built with Qt 5.5, and so on. To avoid version conflicts, no
|
||||
top-level attributes are created for these packages.</para>
|
||||
|
||||
</section>
|
||||
|
||||
<section xml:id="ssec-qt-programs"><title>Programs</title>
|
||||
|
||||
<para>Application packages do not need to be built with every Qt
|
||||
version. To ensure consistency between the package's dependencies,
|
||||
call the package with <literal>qt5Libs.callPackage</literal> instead
|
||||
of the usual <literal>callPackage</literal>. An older version may be
|
||||
selected in case of incompatibility. For example, to build with Qt
|
||||
5.5, call the package with
|
||||
<literal>qt55Libs.callPackage</literal>.</para>
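<para>For example, a hypothetical application built against the default Qt 5
libraries could be called like this in
<filename>all-packages.nix</filename>:

<programlisting>
myQtApp = qt5Libs.callPackage ../applications/misc/my-qt-app { };
</programlisting>
</para>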
|
||||
|
||||
<para>Several environment variables must be set at runtime for Qt
|
||||
applications to function correctly, including:</para>
|
||||
|
||||
<itemizedlist>
|
||||
<listitem><para><envar>QT_PLUGIN_PATH</envar></para></listitem>
|
||||
<listitem><para><envar>QML_IMPORT_PATH</envar></para></listitem>
|
||||
<listitem><para><envar>QML2_IMPORT_PATH</envar></para></listitem>
|
||||
<listitem><para><envar>XDG_DATA_DIRS</envar></para></listitem>
|
||||
</itemizedlist>
|
||||
|
||||
<para>To ensure that these are set correctly, the program must be wrapped by
|
||||
invoking <literal>wrapQtProgram <replaceable>program</replaceable></literal>
|
||||
during installation (for example, during
|
||||
<literal>fixupPhase</literal>). <literal>wrapQtProgram</literal>
|
||||
accepts the same options as <literal>makeWrapper</literal>.
|
||||
</para>
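<para>For instance, a package could wrap its (hypothetical) binary in a
<literal>postFixup</literal> hook:

<programlisting>
postFixup = ''
  wrapQtProgram $out/bin/myprogram
'';
</programlisting>
</para>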
|
||||
|
||||
</section>
|
||||
|
||||
<section xml:id="ssec-qt-kde"><title>KDE</title>
|
||||
|
||||
<para>Many of the considerations above also apply to KDE packages,
|
||||
especially the need to set the correct environment variables at
|
||||
runtime. To ensure that this is done, invoke <literal>wrapKDEProgram
|
||||
<replaceable>program</replaceable></literal> during
|
||||
installation. <literal>wrapKDEProgram</literal> also generates a
|
||||
<literal>ksycoca</literal> database so that required data and services
|
||||
can be found. Like its Qt counterpart,
|
||||
<literal>wrapKDEProgram</literal> accepts the same options as
|
||||
<literal>makeWrapper</literal>.</para>
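<para>For example (again with a hypothetical program name):

<programlisting>
postFixup = ''
  wrapKDEProgram $out/bin/mykdeapp
'';
</programlisting>
</para>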
|
||||
|
||||
</section>
|
||||
|
||||
</section>
|
||||
|
||||
@@ -1,79 +0,0 @@
|
||||
<section xmlns="http://docbook.org/ns/docbook"
|
||||
xmlns:xlink="http://www.w3.org/1999/xlink"
|
||||
xml:id="sec-language-ruby">
|
||||
|
||||
<title>Ruby</title>
|
||||
|
||||
<para>There is currently support for bundling applications that are packaged as Ruby gems. The utility "bundix" allows you to write a <filename>Gemfile</filename>, let bundler create a <filename>Gemfile.lock</filename>, and then convert
this into a Nix expression that contains all gem dependencies automatically.</para>
|
||||
|
||||
<para>For example, to package sensu, we did:</para>
|
||||
|
||||
<screen>
|
||||
<![CDATA[$ cd pkgs/servers/monitoring
|
||||
$ mkdir sensu
|
||||
$ cd sensu
|
||||
$ cat > Gemfile
|
||||
source 'https://rubygems.org'
|
||||
gem 'sensu'
|
||||
$ nix-shell -p bundler --command "bundler package --path /tmp/vendor/bundle"
|
||||
$ $(nix-build '<nixpkgs>' -A bundix)/bin/bundix
|
||||
$ cat > default.nix
|
||||
{ lib, bundlerEnv, ruby }:
|
||||
|
||||
bundlerEnv rec {
|
||||
name = "sensu-${version}";
|
||||
|
||||
version = (import gemset).sensu.version;
|
||||
inherit ruby;
|
||||
gemfile = ./Gemfile;
|
||||
lockfile = ./Gemfile.lock;
|
||||
gemset = ./gemset.nix;
|
||||
|
||||
meta = with lib; {
|
||||
description = "A monitoring framework that aims to be simple, malleable, and scalable";
|
||||
homepage = http://sensuapp.org/;
|
||||
license = with licenses; mit;
|
||||
maintainers = with maintainers; [ theuni ];
|
||||
platforms = platforms.unix;
|
||||
};
|
||||
}]]>
|
||||
</screen>
|
||||
|
||||
<para>Please check in the <filename>Gemfile</filename>, <filename>Gemfile.lock</filename> and the <filename>gemset.nix</filename> so future updates can be run easily.
|
||||
</para>
|
||||
|
||||
<para>Resulting derivations also have two helpful items, <literal>env</literal> and <literal>wrapper</literal>. The first one allows one to quickly drop into
|
||||
<command>nix-shell</command> with the specified environment present. E.g. <command>nix-shell -A sensu.env</command> would give you an environment with Ruby preset
|
||||
so it has all the libraries necessary for <literal>sensu</literal> in its paths. The second one can be used to make derivations from custom Ruby scripts which have
|
||||
<filename>Gemfile</filename>s with their dependencies specified. It is a derivation with <command>ruby</command> wrapped so it can find all the needed dependencies.
|
||||
For example, to make a derivation <literal>my-script</literal> for a <filename>my-script.rb</filename> (which should be placed in <filename>bin</filename>) you should
|
||||
run <command>bundix</command> as specified above and then use <literal>bundlerEnv</literal> like this:</para>
|
||||
|
||||
<programlisting>
|
||||
<![CDATA[let env = bundlerEnv {
|
||||
name = "my-script-env";
|
||||
|
||||
inherit ruby;
|
||||
gemfile = ./Gemfile;
|
||||
lockfile = ./Gemfile.lock;
|
||||
gemset = ./gemset.nix;
|
||||
};
|
||||
|
||||
in stdenv.mkDerivation {
|
||||
name = "my-script";
|
||||
|
||||
buildInputs = [ env.wrapper ];
|
||||
|
||||
script = ./my-script.rb;
|
||||
|
||||
buildCommand = ''
|
||||
mkdir -p $out/bin
|
||||
install -D -m755 $script $out/bin/my-script
|
||||
patchShebangs $out/bin/my-script
|
||||
'';
|
||||
}]]>
|
||||
</programlisting>
|
||||
|
||||
</section>
|
||||
|
||||
@@ -1,60 +0,0 @@
|
||||
<section xmlns="http://docbook.org/ns/docbook"
|
||||
xmlns:xlink="http://www.w3.org/1999/xlink"
|
||||
xml:id="sec-language-texlive">
|
||||
|
||||
<title>TeX Live</title>
|
||||
|
||||
<para>Since release 15.09 there is a new TeX Live packaging that lives entirely under attribute <varname>texlive</varname>.</para>
|
||||
<section><title>User's guide</title>
|
||||
<itemizedlist>
|
||||
<listitem><para>
|
||||
For basic usage just pull <varname>texlive.combined.scheme-basic</varname> for an environment with basic LaTeX support.</para></listitem>
|
||||
<listitem><para>
|
||||
It typically won't work to use separately installed packages together.
|
||||
Instead, you can build a custom set of packages like this:
|
||||
<programlisting>
|
||||
texlive.combine {
|
||||
inherit (texlive) scheme-small collection-langkorean algorithms cm-super;
|
||||
}
|
||||
</programlisting>
|
||||
There are all the schemes, collections and a few thousand packages, as defined upstream (perhaps with tiny differences).
|
||||
</para></listitem>
|
||||
<listitem><para>
|
||||
By default you only get executables and files needed during runtime, and a little documentation for the core packages. To change that, you need to pass a <varname>pkgFilter</varname> function to <varname>combine</varname>.
|
||||
<programlisting>
|
||||
texlive.combine {
|
||||
# inherit (texlive) whatever-you-want;
|
||||
pkgFilter = pkg:
|
||||
pkg.tlType == "run" || pkg.tlType == "bin" || pkg.pname == "cm-super";
|
||||
# elem tlType [ "run" "bin" "doc" "source" ]
|
||||
# there are also other attributes: version, name
|
||||
}
|
||||
</programlisting>
|
||||
</para></listitem>
|
||||
<listitem><para>
|
||||
You can list packages, e.g. with <command>nix-repl</command>.
|
||||
<programlisting>
|
||||
$ nix-repl
|
||||
nix-repl> :l <nixpkgs>
|
||||
nix-repl> texlive.collection-<TAB>
|
||||
</programlisting>
|
||||
</para></listitem>
|
||||
</itemizedlist>
|
||||
</section>
|
||||
|
||||
<section><title>Known problems</title>
|
||||
<itemizedlist>
|
||||
<listitem><para>
|
||||
Some tools are still missing, e.g. luajittex;</para></listitem>
|
||||
<listitem><para>
|
||||
some apps aren't packaged/tested yet (asymptote, biber, etc.);</para></listitem>
|
||||
<listitem><para>
|
||||
feature/bug: when a package is rejected by <varname>pkgFilter</varname>, its dependencies are still propagated;</para></listitem>
|
||||
<listitem><para>
|
||||
in case of any bugs or feature requests, file a github issue or better a pull request and /cc @vcunat.</para></listitem>
|
||||
</itemizedlist>
|
||||
</section>
|
||||
|
||||
|
||||
</section>
|
||||
|
||||
@@ -12,15 +12,10 @@
|
||||
<xi:include href="introduction.xml" />
|
||||
<xi:include href="quick-start.xml" />
|
||||
<xi:include href="stdenv.xml" />
|
||||
<xi:include href="multiple-output.xml" />
|
||||
<xi:include href="configuration.xml" />
|
||||
<xi:include href="functions.xml" />
|
||||
<xi:include href="meta.xml" />
|
||||
<xi:include href="languages-frameworks/index.xml" />
|
||||
<xi:include href="language-support.xml" />
|
||||
<xi:include href="package-notes.xml" />
|
||||
<xi:include href="coding-conventions.xml" />
|
||||
<xi:include href="submitting-changes.xml" />
|
||||
<xi:include href="reviewing-contributions.xml" />
|
||||
<xi:include href="contributing.xml" />
|
||||
|
||||
</book>
|
||||
|
||||
84
doc/meta.xml
@@ -33,7 +33,7 @@ the package. The value of a meta-attribute must be a string.</para>
|
||||
command-line using <command>nix-env</command>:
|
||||
|
||||
<screen>
|
||||
$ nix-env -qa hello --json
|
||||
$ nix-env -qa hello --meta --json
|
||||
{
|
||||
"hello": {
|
||||
"meta": {
|
||||
@@ -61,7 +61,7 @@ $ nix-env -qa hello --json
|
||||
"i686-openbsd",
|
||||
"x86_64-openbsd"
|
||||
],
|
||||
"position": "/home/user/dev/nixpkgs/pkgs/applications/misc/hello/default.nix:14"
|
||||
"position": "/home/user/dev/nixpkgs/pkgs/applications/misc/hello/ex-2/default.nix:14"
|
||||
},
|
||||
"name": "hello-2.9",
|
||||
"system": "x86_64-linux"
|
||||
@@ -82,8 +82,7 @@ hello-2.3 A program that produces a familiar, friendly greeting
|
||||
</para>
|
||||
|
||||
|
||||
<section xml:id="sec-standard-meta-attributes"><title>Standard
|
||||
meta-attributes</title>
|
||||
<section><title>Standard meta-attributes</title>
|
||||
|
||||
<para>It is expected that each meta-attribute is one of the following:</para>
|
||||
|
||||
@@ -112,6 +111,11 @@ meta-attributes</title>
|
||||
package.</para></listitem>
|
||||
</varlistentry>
|
||||
|
||||
<varlistentry>
|
||||
<term><varname>version</varname></term>
|
||||
<listitem><para>Package version.</para></listitem>
|
||||
</varlistentry>
|
||||
|
||||
<varlistentry>
|
||||
<term><varname>branch</varname></term>
|
||||
<listitem><para>Release branch. Used to specify that a package is not
|
||||
@@ -133,39 +137,12 @@ meta-attributes</title>
|
||||
|
||||
<varlistentry>
|
||||
<term><varname>license</varname></term>
|
||||
<listitem>
|
||||
<para>
|
||||
The license, or licenses, for the package. One from the attribute set
|
||||
defined in <link
|
||||
xlink:href="https://github.com/NixOS/nixpkgs/blob/master/lib/licenses.nix">
|
||||
<filename>nixpkgs/lib/licenses.nix</filename></link>. At this moment
|
||||
using both a list of licenses and a single license is valid. If the
|
||||
license field is in the form of a list representation, then it means
|
||||
that parts of the package are licensed differently. Each license
|
||||
should preferably be referenced by their attribute. The non-list
|
||||
attribute value can also be a space delimited string representation of
|
||||
the contained attribute shortNames or spdxIds. The following are all valid
|
||||
examples:
|
||||
<itemizedlist>
|
||||
<listitem><para>Single license referenced by attribute (preferred)
|
||||
<literal>stdenv.lib.licenses.gpl3</literal>.
|
||||
</para></listitem>
|
||||
<listitem><para>Single license referenced by its attribute shortName (frowned upon)
|
||||
<literal>"gpl3"</literal>.
|
||||
</para></listitem>
|
||||
<listitem><para>Single license referenced by its attribute spdxId (frowned upon)
|
||||
<literal>"GPL-3.0"</literal>.
|
||||
</para></listitem>
|
||||
<listitem><para>Multiple licenses referenced by attribute (preferred)
|
||||
<literal>with stdenv.lib.licenses; [ asl20 free ofl ]</literal>.
|
||||
</para></listitem>
|
||||
<listitem><para>Multiple licenses referenced as a space delimited string of attribute shortNames (frowned upon)
|
||||
<literal>"asl20 free ofl"</literal>.
|
||||
</para></listitem>
|
||||
</itemizedlist>
|
||||
For details, see <xref linkend='sec-meta-license'/>.
|
||||
</para>
|
||||
</listitem>
|
||||
<listitem><para>The license for the package. One from the
|
||||
attribute set defined in <link
|
||||
xlink:href="https://github.com/NixOS/nixpkgs/blob/master/lib/licenses.nix">
|
||||
<filename>nixpkgs/lib/licenses.nix</filename></link>. Example:
|
||||
<literal>stdenv.lib.licenses.gpl3</literal>. For details, see
|
||||
<xref linkend='sec-meta-license'/>.</para></listitem>
|
||||
</varlistentry>
|
||||
|
||||
<varlistentry>
|
||||
@@ -258,54 +235,45 @@ a value from <varname>stdenv.lib.licenses</varname> defined in
|
||||
<link xlink:href="https://github.com/NixOS/nixpkgs/blob/master/lib/licenses.nix">
|
||||
<filename>nixpkgs/lib/licenses.nix</filename></link>,
|
||||
or in-place license description of the same format if the license is
|
||||
unlikely to be useful in another expression.</para>
|
||||
|
||||
<para>Although it's typically better to indicate the specific license,
|
||||
a few generic options are available:
|
||||
unlikely to be useful in another expression.
|
||||
|
||||
A few generic options are available, although it's typically better
|
||||
to indicate the specific license:
|
||||
<variablelist>
|
||||
|
||||
<varlistentry>
|
||||
<term><varname>stdenv.lib.licenses.free</varname>,
|
||||
<varname>"free"</varname></term>
|
||||
|
||||
<term><varname>free</varname></term>
|
||||
<listitem><para>Catch-all for free software licenses not listed
|
||||
above.</para></listitem>
|
||||
</varlistentry>
|
||||
|
||||
<varlistentry>
|
||||
<term><varname>stdenv.lib.licenses.unfreeRedistributable</varname>,
|
||||
<varname>"unfree-redistributable"</varname></term>
|
||||
|
||||
<term><varname>unfree-redistributable</varname></term>
|
||||
<listitem><para>Unfree package that can be redistributed in binary
|
||||
form. That is, it’s legal to redistribute the
|
||||
form. That is, it’s legal to redistribute the
|
||||
<emphasis>output</emphasis> of the derivation. This means that
|
||||
the package can be included in the Nixpkgs
|
||||
channel.</para>
|
||||
|
||||
<para>Sometimes proprietary software can only be redistributed
|
||||
unmodified. Make sure the builder doesn’t actually modify the
|
||||
unmodified. Make sure the builder doesn’t actually modify the
|
||||
original binaries; otherwise we’re breaking the license. For
|
||||
instance, the NVIDIA X11 drivers can be redistributed unmodified,
|
||||
but our builder applies <command>patchelf</command> to make them
|
||||
work. Thus, its license is <varname>"unfree"</varname> and it
|
||||
work. Thus, its license is <varname>unfree</varname> and it
|
||||
cannot be included in the Nixpkgs channel.</para></listitem>
|
||||
</varlistentry>
|
||||
|
||||
<varlistentry>
|
||||
<term><varname>stdenv.lib.licenses.unfree</varname>,
|
||||
<varname>"unfree"</varname></term>
|
||||
|
||||
<listitem><para>Unfree package that cannot be redistributed. You
|
||||
<term><varname>unfree</varname></term>
|
||||
<listitem><para>Unfree package that cannot be redistributed. You
|
||||
can build it yourself, but you cannot redistribute the output of
|
||||
the derivation. Thus it cannot be included in the Nixpkgs
|
||||
the derivation. Thus it cannot be included in the Nixpkgs
|
||||
channel.</para></listitem>
|
||||
</varlistentry>
|
||||
|
||||
<varlistentry>
|
||||
<term><varname>stdenv.lib.licenses.unfreeRedistributableFirmware</varname>,
|
||||
<varname>"unfree-redistributable-firmware"</varname></term>
|
||||
|
||||
<term><varname>unfree-redistributable-firmware</varname></term>
|
||||
<listitem><para>This package supplies unfree, redistributable
|
||||
firmware. This is a separate value from
|
||||
<varname>unfree-redistributable</varname> because not everybody
|
||||
|
||||
@@ -1,104 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!DOCTYPE chapter [
|
||||
<!ENTITY ndash "–"> <!-- @vcunat likes to use this one ;-) -->
|
||||
]>
|
||||
<chapter xmlns="http://docbook.org/ns/docbook"
|
||||
xmlns:xlink="http://www.w3.org/1999/xlink"
|
||||
xml:id="chap-multiple-output">
|
||||
|
||||
<title>Multiple-output packages</title>
|
||||
|
||||
<section><title>Introduction</title>
|
||||
<para>The Nix language allows a derivation to produce multiple outputs, which is similar to what is utilized by other Linux distribution packaging systems. The outputs reside in separate nix store paths, so they can be mostly handled independently of each other, including passing to build inputs, garbage collection or binary substitution. The exception is that building from source always produces all the outputs.</para>
|
||||
<para>The main motivation is to save disk space by reducing runtime closure sizes; consequently, the sizes of substituted binaries are also reduced. Splitting can also be used to make runtime dependencies more granular; the typical reduction is to split away development-only files, as those are usually not needed at runtime. As a result, the closure sizes of many packages can be cut in half or even much more.</para>
|
||||
<note><para>The same reduction could instead be achieved by building the parts in completely separate derivations. That would often reduce build-time closures as well, but such derivations tend to be much harder to write, as build systems typically assume all parts are built at once. This compromise approach of a single source package producing multiple binary packages is also commonly used by rpm and deb.</para></note>
|
||||
</section>
|
||||
|
||||
<section><title>Installing a split package</title>
|
||||
<para>When installing a package via <varname>systemPackages</varname> or <command>nix-env</command> you have several options:</para>
|
||||
<warning><para>Currently <command>nix-env</command> almost always installs all outputs until https://github.com/NixOS/nix/pull/815 gets merged.</para></warning>
|
||||
<itemizedlist>
|
||||
<listitem><para>You can install particular outputs explicitly, as each is available in the Nix language as an attribute of the package. The <varname>outputs</varname> attribute contains a list of output names; see the sketch after this list.</para></listitem>
|
||||
<listitem><para>You can let it use the default outputs. These are handled by <varname>meta.outputsToInstall</varname> attribute that contains a list of output names.</para>
|
||||
<para>TODO: more about tweaking the attribute, etc.</para></listitem>
|
||||
<listitem><para>NixOS provides the configuration option <varname>environment.extraOutputsToInstall</varname>, which allows adding extra outputs of <varname>environment.systemPackages</varname> on top of the default ones. It's mainly meant for documentation and debug symbols, and it's also modified by specific options.</para>
|
||||
<note><para>At the moment there is no similar configurability for packages installed by <command>nix-env</command>. You can still use the approach from <xref linkend="sec-modify-via-packageOverrides" /> to override <varname>meta.outputsToInstall</varname> attributes, but that's rather inconvenient.</para></note>
|
||||
</listitem>
|
||||
</itemizedlist>
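<para>As a sketch of the options above (the package and output names are only
illustrative), outputs can be selected explicitly in NixOS configuration, and
extra outputs can be requested globally:
<programlisting>
# configuration.nix (sketch)
environment.systemPackages = with pkgs; [
  coreutils        # default outputs, as listed in meta.outputsToInstall
  coreutils.info   # explicitly pull in the "info" output as well
];

# additionally install the "doc" output of every package in systemPackages
environment.extraOutputsToInstall = [ "doc" ];
</programlisting>
</para>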
|
||||
</section>
|
||||
|
||||
<section><title>Using a split package</title>
|
||||
<para>In the Nix language the individual outputs can be reached explicitly as attributes, e.g. <varname>coreutils.info</varname>, but the typical case is just using packages as build inputs.</para>
|
||||
<para>When a multiple-output derivation gets into a build input of another derivation, the <varname>dev</varname> output is added if it exists; otherwise the first output is added. In addition, the <varname>propagatedBuildOutputs</varname> of that package, which by default contain <varname>$outputBin</varname> and <varname>$outputLib</varname>, are also added. (See <xref linkend="multiple-output-file-type-groups" />.)</para>
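<para>A brief sketch (the package names are only illustrative): a dependent
expression can rely on the default <varname>dev</varname> selection for
<varname>buildInputs</varname>, or reference a particular output explicitly
through interpolation:
<programlisting>
{ stdenv, openssl }:

stdenv.mkDerivation {
  name = "frobnicator-1.0";          # hypothetical package
  src = ./frobnicator-1.0.tar.gz;
  buildInputs = [ openssl ];         # resolves to openssl's dev output during the build
  postInstall = ''
    # explicit reference to a particular output via interpolation
    ln -s ${openssl.out}/lib $out/lib/openssl
  '';
}
</programlisting>
</para>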
|
||||
</section>
|
||||
|
||||
|
||||
<section><title>Writing a split derivation</title>
|
||||
<para>Here you find how to write a derivation that produces multiple outputs.</para>
|
||||
<para>In nixpkgs there is a framework supporting multiple-output derivations. It tries to cover most cases through its default behavior. You can find the source in <filename>nixpkgs/pkgs/build-support/setup-hooks/multiple-outputs.sh</filename>; it is fairly readable. The whole machinery is triggered by defining the <varname>outputs</varname> attribute to contain the list of desired output names (strings).</para>
|
||||
<programlisting>outputs = [ "bin" "dev" "out" "doc" ];</programlisting>
|
||||
<para>Often such a single line is enough. For each output an equally named environment variable is passed to the builder and contains the path in the nix store for that output. By convention, the first output should contain the executable programs provided by the package, as that output is used by Nix in string conversions, allowing references to binaries like <literal>${pkgs.perl}/bin/perl</literal> to always work. Typically you also want to have the main <varname>out</varname> output, as it catches any files that didn't go elsewhere.</para>
|
||||
|
||||
<note><para>There is a special handling of the <varname>debug</varname> output, described at <xref linkend="stdenv-separateDebugInfo" />.</para></note>
|
||||
|
||||
<section xml:id="multiple-output-file-type-groups">
|
||||
<title>File type groups</title>
|
||||
<para>The support code currently recognizes some particular kinds of outputs and either instructs the build system of the package to put files into their desired outputs or it moves the files during the fixup phase. Each group of file types has an <varname>outputFoo</varname> variable specifying the output name where they should go. If that variable isn't defined by the derivation writer, it is guessed – a default output name is defined, falling back to other possibilities if the output isn't defined.</para>
|
||||
<variablelist>
|
||||
|
||||
<varlistentry><term><varname>
|
||||
$outputDev</varname></term><listitem><para>
|
||||
is for development-only files. These include C(++) headers, pkg-config, cmake and aclocal files. They go to <varname>dev</varname> or <varname>out</varname> by default.
|
||||
</para></listitem>
|
||||
</varlistentry>
|
||||
|
||||
<varlistentry><term><varname>
|
||||
$outputBin</varname></term><listitem><para>
|
||||
is meant for user-facing binaries, typically residing in bin/. They go to <varname>bin</varname> or <varname>out</varname> by default.
|
||||
</para></listitem></varlistentry>
|
||||
|
||||
<varlistentry><term><varname>
|
||||
$outputLib</varname></term><listitem><para>
|
||||
is meant for libraries, typically residing in <filename>lib/</filename> and <filename>libexec/</filename>. They go to <varname>lib</varname> or <varname>out</varname> by default.
|
||||
</para></listitem></varlistentry>
|
||||
|
||||
<varlistentry><term><varname>
|
||||
$outputDoc</varname></term><listitem><para>
|
||||
is for user documentation, typically residing in <filename>share/doc/</filename>. It goes to <varname>doc</varname> or <varname>out</varname> by default.
|
||||
</para></listitem></varlistentry>
|
||||
|
||||
<varlistentry><term><varname>
|
||||
$outputDevdoc</varname></term><listitem><para>
|
||||
is for <emphasis>developer</emphasis> documentation. Currently this includes gtk-doc. It goes to <varname>devdoc</varname> or is removed (!) by default. This is because gtk-doc, for example, tends to be rather large and completely unused by nixpkgs users.
|
||||
</para></listitem></varlistentry>
|
||||
|
||||
<varlistentry><term><varname>
|
||||
$outputMan</varname></term><listitem><para>
|
||||
is for man pages (except for section 3). They go to <varname>man</varname> or <varname>doc</varname> or <varname>$outputBin</varname> by default.
|
||||
</para></listitem></varlistentry>
|
||||
|
||||
<varlistentry><term><varname>
|
||||
$outputDevman</varname></term><listitem><para>
|
||||
is for section 3 man pages. They go to <varname>devman</varname> or <varname>$outputMan</varname> by default.
|
||||
</para></listitem></varlistentry>
|
||||
|
||||
<varlistentry><term><varname>
|
||||
$outputInfo</varname></term><listitem><para>
|
||||
is for info pages. They go to <varname>info</varname> or <varname>doc</varname> or <varname>$outputMan</varname> by default.
|
||||
</para></listitem></varlistentry>
|
||||
|
||||
</variablelist>
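<para>For illustration, a brief sketch tying these variables together (the
package and the chosen outputs are made up):
<programlisting>
stdenv.mkDerivation {
  name = "libfoo-1.2.3";                  # hypothetical package
  src = ./libfoo-1.2.3.tar.gz;
  outputs = [ "bin" "dev" "out" "doc" ];  # no separate "man" output declared...
  outputMan = "doc";                      # ...so send man pages to "doc" explicitly
}
</programlisting>
</para>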
|
||||
</section>
|
||||
|
||||
<section><title>Common caveats</title>
|
||||
<itemizedlist>
|
||||
<listitem><para>Some configure scripts don't like some of the parameters passed by default by the framework, e.g. <literal>--docdir=/foo/bar</literal>. You can disable this by setting <literal>setOutputFlags = false;</literal>.</para></listitem>
|
||||
<listitem><para>The outputs of a single derivation can retain references to each other, but note that circular references are not allowed. (And each strongly-connected component would act as a single output anyway.)</para></listitem>
|
||||
<listitem><para>Most split packages contain their core functionality in libraries. These libraries tend to refer to various kinds of data that typically end up in <varname>out</varname>, e.g. locale strings, so there is often no advantage in separating the libraries into <varname>lib</varname>; keeping them in <varname>out</varname> is easier.</para></listitem>
|
||||
<listitem><para>Some packages have hidden assumptions on install paths, which complicates splitting.</para></listitem>
|
||||
</itemizedlist>
|
||||
</section>
|
||||
|
||||
</section><!--Writing a split derivation-->
|
||||
|
||||
</chapter>
|
||||
14
doc/old/update-upstream-data.txt
Normal file
14
doc/old/update-upstream-data.txt
Normal file
@@ -0,0 +1,14 @@
|
||||
Semi-automatic source information updating using "update-upstream-data.sh" script and "src-{,info-}for-*.nix"
|
||||
|
||||
1. Recognizing when a pre-existing package uses this mechanism.
|
||||
|
||||
Packages using this automatic update mechanism have src-info-for-default.nix and src-for-default.nix next to default.nix. src-info-for-default.nix describes how to get the freshest source from the upstream web site; src-for-default.nix is a generated file with the current data about the source in use. Both files define a simple attrSet.
|
||||
|
||||
src-info-for-default.nix (for a file grabbed via http) contains at least the downloadPage attribute - the page we need to look at to find out the latest version. It also contains baseName, which is used for automatic generation of a package name containing the version. It can contain extra data for trickier cases.
|
||||
|
||||
src-for-default.nix will contain advertisedUrl (the raw URL chosen on the site; a change in it prompts regeneration of the source data), the url for fetchurl, the hash, the version retrieved from the download URL and the suggested package name.
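For illustration only, a hypothetical generated src-for-default.nix could look like
this (all values are made up):

  rec {
    advertisedUrl = "https://example.org/downloads/foo-1.2.3.tar.gz";
    version = "1.2.3";
    name = "foo-1.2.3";
    url = advertisedUrl;
    hash = "0x2g1jqygyr5wiwg4ma1nd7w4ydpy82z9gkcv8vh2v8dn3y58v5m";
  }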
|
||||
|
||||
2. Updating a package
|
||||
|
||||
The nixpkgs/pkgs/build-support/upstream-updater directory contains some scripts. The worker script is called update-upstream-data.sh. This script requires the main expression name (e.g. default.nix). It can optionally accept a second parameter, a URL that will be used instead of obtaining one by parsing the downloadPage (version extraction, mirror URL creation etc. will still be run). After running the script, check src-for-default.nix (or replace default.nix with the expression name, if there are several expressions in the directory) for new version information.
|
||||
|
||||
@@ -125,7 +125,7 @@ $ make menuconfig ARCH=<replaceable>arch</replaceable></screen>
|
||||
<listitem>
|
||||
<para>It may be that the new kernel requires updating the external
|
||||
kernel modules and kernel-dependent packages listed in the
|
||||
<varname>linuxPackagesFor</varname> function in
|
||||
<varname>kernelPackagesFor</varname> function in
|
||||
<filename>all-packages.nix</filename> (such as the NVIDIA drivers,
|
||||
AUFS, etc.). If the updated packages aren’t backwards compatible
|
||||
with older kernels, you may need to keep the older versions
|
||||
@@ -141,7 +141,7 @@ $ make menuconfig ARCH=<replaceable>arch</replaceable></screen>
|
||||
|
||||
<!--============================================================-->
|
||||
|
||||
<section xml:id="sec-xorg">
|
||||
<section>
|
||||
|
||||
<title>X.org</title>
|
||||
|
||||
@@ -219,301 +219,5 @@ you should modify
|
||||
</section>
|
||||
-->
|
||||
|
||||
<!--============================================================-->
|
||||
|
||||
<section xml:id="sec-eclipse">
|
||||
|
||||
<title>Eclipse</title>
|
||||
|
||||
<para>
|
||||
The Nix expressions related to the Eclipse platform and IDE are in
|
||||
<link xlink:href="https://github.com/NixOS/nixpkgs/blob/master/pkgs/applications/editors/eclipse"><filename>pkgs/applications/editors/eclipse</filename></link>.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
Nixpkgs provides a number of packages that will install Eclipse in
|
||||
its various forms, these range from the bare-bones Eclipse
|
||||
Platform to the more fully featured Eclipse SDK or Scala-IDE
|
||||
packages, and multiple versions are often available. It is possible
|
||||
to list available Eclipse packages by issuing the command:
|
||||
|
||||
<screen>
|
||||
$ nix-env -f '<nixpkgs>' -qaP -A eclipses --description
|
||||
</screen>
|
||||
|
||||
Once an Eclipse variant is installed it can be run using the
|
||||
<command>eclipse</command> command, as expected. From within
|
||||
Eclipse it is then possible to install plugins in the usual manner
|
||||
by either manually specifying an Eclipse update site or by
|
||||
installing the Marketplace Client plugin and using it to discover
|
||||
and install other plugins. This installation method provides an
|
||||
Eclipse installation that closely resembles a manually installed
|
||||
Eclipse.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
If you prefer to install plugins in a more declarative manner then
|
||||
Nixpkgs also offer a number of Eclipse plugins that can be
|
||||
installed in an <emphasis>Eclipse environment</emphasis>. This
|
||||
type of environment is created using the function
|
||||
<varname>eclipseWithPlugins</varname> found inside the
|
||||
<varname>nixpkgs.eclipses</varname> attribute set. This function
|
||||
takes as argument <literal>{ eclipse, plugins ? [], jvmArgs ? []
|
||||
}</literal> where <varname>eclipse</varname> is one of the
|
||||
Eclipse packages described above, <varname>plugins</varname> is a
|
||||
list of plugin derivations, and <varname>jvmArgs</varname> is a
|
||||
list of arguments given to the JVM running the Eclipse. For
|
||||
example, say you wish to install the latest Eclipse Platform with
|
||||
the popular Eclipse Color Theme plugin and also allow Eclipse to
|
||||
use more RAM. You could then add
|
||||
|
||||
<screen>
|
||||
packageOverrides = pkgs: {
|
||||
myEclipse = with pkgs.eclipses; eclipseWithPlugins {
|
||||
eclipse = eclipse-platform;
|
||||
jvmArgs = [ "-Xmx2048m" ];
|
||||
plugins = [ plugins.color-theme ];
|
||||
};
|
||||
}
|
||||
</screen>
|
||||
|
||||
to your Nixpkgs configuration
|
||||
(<filename>~/.nixpkgs/config.nix</filename>) and install it by
|
||||
running <command>nix-env -f '<nixpkgs>' -iA
|
||||
myEclipse</command> and afterward run Eclipse as usual. It is
|
||||
possible to find out which plugins are available for installation
|
||||
using <varname>eclipseWithPlugins</varname> by running
|
||||
|
||||
<screen>
|
||||
$ nix-env -f '<nixpkgs>' -qaP -A eclipses.plugins --description
|
||||
</screen>
|
||||
</para>
|
||||
|
||||
<para>
|
||||
If there is a need to install plugins that are not available in
|
||||
Nixpkgs then it may be possible to define these plugins outside
|
||||
Nixpkgs using the <varname>buildEclipseUpdateSite</varname> and
|
||||
<varname>buildEclipsePlugin</varname> functions found in the
|
||||
<varname>nixpkgs.eclipses.plugins</varname> attribute set. Use the
|
||||
<varname>buildEclipseUpdateSite</varname> function to install a
|
||||
plugin distributed as an Eclipse update site. This function takes
|
||||
<literal>{ name, src }</literal> as argument where
|
||||
<literal>src</literal> indicates the Eclipse update site archive.
|
||||
All Eclipse features and plugins within the downloaded update site
|
||||
will be installed. When an update site archive is not available
|
||||
then the <varname>buildEclipsePlugin</varname> function can be
|
||||
used to install a plugin that consists of a pair of feature and
|
||||
plugin JARs. This function takes an argument <literal>{ name,
|
||||
srcFeature, srcPlugin }</literal> where
|
||||
<literal>srcFeature</literal> and <literal>srcPlugin</literal> are
|
||||
the feature and plugin JARs, respectively.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
Expanding the previous example with two plugins using the above
|
||||
functions we have
|
||||
<screen>
|
||||
packageOverrides = pkgs: {
|
||||
myEclipse = with pkgs.eclipses; eclipseWithPlugins {
|
||||
eclipse = eclipse-platform;
|
||||
jvmArgs = [ "-Xmx2048m" ];
|
||||
plugins = [
|
||||
plugins.color-theme
|
||||
(plugins.buildEclipsePlugin {
|
||||
name = "myplugin1-1.0";
|
||||
srcFeature = fetchurl {
|
||||
url = "http://…/features/myplugin1.jar";
|
||||
sha256 = "123…";
|
||||
};
|
||||
srcPlugin = fetchurl {
|
||||
url = "http://…/plugins/myplugin1.jar";
|
||||
sha256 = "123…";
|
||||
};
|
||||
});
|
||||
(plugins.buildEclipseUpdateSite {
|
||||
name = "myplugin2-1.0";
|
||||
src = fetchurl {
|
||||
stripRoot = false;
|
||||
url = "http://…/myplugin2.zip";
|
||||
sha256 = "123…";
|
||||
};
|
||||
});
|
||||
];
|
||||
};
|
||||
}
|
||||
</screen>
|
||||
</para>
|
||||
|
||||
</section>
|
||||
|
||||
<section xml:id="sec-elm">
|
||||
|
||||
<title>Elm</title>
|
||||
|
||||
<para>
|
||||
The Nix expressions for Elm reside in
|
||||
<filename>pkgs/development/compilers/elm</filename>. They are generated
|
||||
automatically by the <command>update-elm.rb</command> script. One should
|
||||
specify versions of Elm packages inside the script, clear the
|
||||
<filename>packages</filename> directory and run the script from inside it.
|
||||
<literal>elm-reactor</literal> is special because it also has Elm package
|
||||
dependencies. The process is not automated very much for now -- you should
|
||||
get the <literal>elm-reactor</literal> source tree (e.g. with
|
||||
<command>nix-shell</command>) and run <command>elm2nix.rb</command> inside
|
||||
it. Place the resulting <filename>package.nix</filename> file into
|
||||
<filename>packages/elm-reactor-elm.nix</filename>.
|
||||
</para>
|
||||
|
||||
</section>
|
||||
|
||||
<section xml:id="sec-autojump">
|
||||
|
||||
<title>Autojump</title>
|
||||
|
||||
<para>
|
||||
autojump needs shell integration to be useful, but unlike on other systems,
|
||||
Nix doesn't have a standard share directory location. This is why an
|
||||
<command>autojump-share</command> script is shipped that prints the location
|
||||
of the shared folder. This can then be used in the .bashrc like this:
|
||||
<screen>
|
||||
source "$(autojump-share)/autojump.bash"
|
||||
</screen>
|
||||
</para>
|
||||
|
||||
</section>
|
||||
|
||||
<section xml:id="sec-steam">
|
||||
|
||||
<title>Steam</title>
|
||||
|
||||
<section xml:id="sec-steam-nix">
|
||||
|
||||
<title>Steam in Nix</title>
|
||||
|
||||
<para>
|
||||
Steam is distributed as a <filename>.deb</filename> file, for now only
|
||||
as an i686 package (the amd64 package only has documentation).
|
||||
When unpacked, it has a script called <filename>steam</filename> that
|
||||
in Ubuntu (their target distro) would go to <filename>/usr/bin
|
||||
</filename>. When run for the first time, this script copies some
|
||||
files to the user's home, which include another script that is the
|
||||
ultimately responsible for launching the Steam binary, which is also
|
||||
in $HOME.
|
||||
</para>
|
||||
<para>
|
||||
Nix problems and constraints:
|
||||
<itemizedlist>
|
||||
<listitem><para>We don't have <filename>/bin/bash</filename> and many
|
||||
scripts point there. Similarly for <filename>/usr/bin/python</filename>
|
||||
.</para></listitem>
|
||||
<listitem><para>We don't have the dynamic loader in <filename>/lib
|
||||
</filename>.</para></listitem>
|
||||
<listitem><para>The <filename>steam.sh</filename> script in $HOME cannot
|
||||
be patched, as it is checked and rewritten by Steam.</para></listitem>
|
||||
<listitem><para>The Steam binary cannot be patched, as it is also checked.</para></listitem>
|
||||
</itemizedlist>
|
||||
</para>
|
||||
<para>
|
||||
The current approach to deploy Steam in NixOS is composing a FHS-compatible
|
||||
chroot environment, as documented
|
||||
<link xlink:href="http://sandervanderburg.blogspot.nl/2013/09/composing-fhs-compatible-chroot.html">here</link>.
|
||||
This allows us to have binaries in the expected paths without disrupting the system,
|
||||
and to avoid patching them to work in a non-FHS environment.
|
||||
</para>
|
||||
|
||||
</section>
|
||||
|
||||
<section xml:id="sec-steam-play">
|
||||
|
||||
<title>How to play</title>
|
||||
|
||||
<para>
|
||||
For 64-bit systems it's important to have
|
||||
<programlisting>hardware.opengl.driSupport32Bit = true;</programlisting>
|
||||
in your <filename>/etc/nixos/configuration.nix</filename>. You'll also need
|
||||
<programlisting>hardware.pulseaudio.support32Bit = true;</programlisting>
|
||||
if you are using PulseAudio; this enables 32-bit ALSA application integration.
|
||||
To use the Steam controller, you need to add
|
||||
<programlisting>services.udev.extraRules = ''
|
||||
SUBSYSTEM=="usb", ATTRS{idVendor}=="28de", MODE="0666"
|
||||
KERNEL=="uinput", MODE="0660", GROUP="users", OPTIONS+="static_node=uinput"
|
||||
'';</programlisting>
|
||||
to your configuration.
|
||||
</para>
|
||||
|
||||
</section>
|
||||
|
||||
<section xml:id="sec-steam-troub">
|
||||
|
||||
<title>Troubleshooting</title>
|
||||
|
||||
<para>
|
||||
<variablelist>
|
||||
|
||||
<varlistentry>
|
||||
<term>Steam fails to start. What do I do?</term>
|
||||
<listitem><para>Try to run
|
||||
<programlisting>strace steam</programlisting>
|
||||
to see what is causing Steam to fail.</para></listitem>
|
||||
</varlistentry>
|
||||
|
||||
<varlistentry>
|
||||
<term>Using the FOSS Radeon drivers</term>
|
||||
<listitem><itemizedlist><listitem><para>
|
||||
The open source radeon drivers need a newer libc++ than is provided
|
||||
by the default runtime, which leads to a crash on launch. Use
|
||||
<programlisting>environment.systemPackages = [(pkgs.steam.override { newStdcpp = true; })];</programlisting>
|
||||
in your config if you get an error like
|
||||
<programlisting>
|
||||
libGL error: unable to load driver: radeonsi_dri.so
|
||||
libGL error: driver pointer missing
|
||||
libGL error: failed to load driver: radeonsi
|
||||
libGL error: unable to load driver: swrast_dri.so
|
||||
libGL error: failed to load driver: swrast</programlisting></para></listitem>
|
||||
<listitem><para>
|
||||
Steam ships statically linked with a version of libcrypto that
|
||||
conflicts with the one dynamically loaded by radeonsi_dri.so.
|
||||
If you get the error
|
||||
<programlisting>steam.sh: line 713: 7842 Segmentation fault (core dumped)</programlisting>
|
||||
have a look at <link xlink:href="https://github.com/NixOS/nixpkgs/pull/20269">this pull request</link>.
|
||||
</para></listitem>
|
||||
|
||||
</itemizedlist></listitem></varlistentry>
|
||||
|
||||
<varlistentry>
|
||||
<term>Java</term>
|
||||
<listitem><orderedlist>
|
||||
<listitem><para>
|
||||
There is no Java in the Steam chrootenv by default. If you get a message like
|
||||
<programlisting>/home/foo/.local/share/Steam/SteamApps/common/towns/towns.sh: line 1: java: command not found</programlisting>
|
||||
you need to add
|
||||
<programlisting> steam.override { withJava = true; };</programlisting>
|
||||
to your configuration.
|
||||
</para></listitem>
|
||||
</orderedlist></listitem></varlistentry>
|
||||
|
||||
</variablelist>
|
||||
</para>
|
||||
|
||||
</section>
|
||||
|
||||
<section xml:id="sec-steam-run">
|
||||
|
||||
<title>steam-run</title>
|
||||
<para>
|
||||
The FHS-compatible chroot used for Steam can also be used to run
|
||||
other Linux games that expect an FHS environment.
|
||||
To do it, add
|
||||
<programlisting>(pkgs.steam.override {
|
||||
nativeOnly = true;
|
||||
newStdcpp = true;
|
||||
}).run</programlisting>
|
||||
to your configuration, rebuild, and run the game with
|
||||
<programlisting>steam-run ./foo</programlisting>
|
||||
</para>
|
||||
|
||||
</section>
|
||||
|
||||
</section>
|
||||
|
||||
</chapter>
|
||||
|
||||
@@ -55,18 +55,18 @@ $ git add pkgs/development/libraries/libfoo/default.nix</screen>
|
||||
<itemizedlist>
|
||||
|
||||
<listitem>
|
||||
<para>GNU Hello: <link
|
||||
xlink:href="https://github.com/NixOS/nixpkgs/blob/master/pkgs/applications/misc/hello/default.nix"><filename>pkgs/applications/misc/hello/default.nix</filename></link>.
|
||||
Trivial package, which specifies some <varname>meta</varname>
|
||||
attributes which is good practice.</para>
|
||||
<para>GNU cpio: <link
|
||||
xlink:href="https://github.com/NixOS/nixpkgs/blob/master/pkgs/tools/archivers/cpio/default.nix"><filename>pkgs/tools/archivers/cpio/default.nix</filename></link>.
|
||||
The simplest possible package. The generic builder in
|
||||
<varname>stdenv</varname> does everything for you. It has
|
||||
no dependencies beyond <varname>stdenv</varname>.</para>
|
||||
</listitem>
|
||||
|
||||
<listitem>
|
||||
<para>GNU cpio: <link
|
||||
xlink:href="https://github.com/NixOS/nixpkgs/blob/master/pkgs/tools/archivers/cpio/default.nix"><filename>pkgs/tools/archivers/cpio/default.nix</filename></link>.
|
||||
Also a simple package. The generic builder in
|
||||
<varname>stdenv</varname> does everything for you. It has
|
||||
no dependencies beyond <varname>stdenv</varname>.</para>
|
||||
<para>GNU Hello: <link
|
||||
xlink:href="https://github.com/NixOS/nixpkgs/blob/master/pkgs/applications/misc/hello/ex-2/default.nix"><filename>pkgs/applications/misc/hello/ex-2/default.nix</filename></link>.
|
||||
Also trivial, but it specifies some <varname>meta</varname>
|
||||
attributes which is good practice.</para>
|
||||
</listitem>
|
||||
|
||||
<listitem>
|
||||
|
||||
@@ -1,393 +0,0 @@
|
||||
<chapter xmlns="http://docbook.org/ns/docbook"
|
||||
xmlns:xlink="http://www.w3.org/1999/xlink"
|
||||
xmlns:xi="http://www.w3.org/2001/XInclude"
|
||||
version="5.0"
|
||||
xml:id="sec-reviewing-contributions">
|
||||
|
||||
<title>Reviewing contributions</title>
|
||||
|
||||
<warning>
|
||||
<para>The following section is a draft and reviewing policy is still being
|
||||
discussed.</para>
|
||||
</warning>
|
||||
|
||||
<para>The nixpkgs project receives a fairly high number of contributions via
|
||||
GitHub pull-requests. Reviewing and approving these is an important task and a
|
||||
way to contribute to the project.</para>
|
||||
|
||||
<para>The high change rate of nixpkgs makes any pull request that is open for
|
||||
long enough subject to conflicts that will require extra work from the
|
||||
submitter or the merger. Reviewing pull requests in a timely manner and being
|
||||
responsive to the comments is the key to avoiding these. GitHub provides sort
|
||||
filters that can be used to see the <link
|
||||
xlink:href="https://github.com/NixOS/nixpkgs/pulls?q=is%3Apr+is%3Aopen+sort%3Aupdated-desc">most
|
||||
recently</link> and the <link
|
||||
xlink:href="https://github.com/NixOS/nixpkgs/pulls?q=is%3Apr+is%3Aopen+sort%3Aupdated-asc">least
|
||||
recently</link> updated pull-requests.</para>
|
||||
|
||||
<para>When reviewing a pull request, please always be nice and polite.
|
||||
Controversial changes can lead to controversial opinions, but it is important
|
||||
to respect every community member and their work.</para>
|
||||
|
||||
<para>GitHub provides reactions; they are a simple and quick way to provide
|
||||
feedback to pull-requests or any comments. The thumb-down reaction should be
|
||||
used with care and, if possible, accompanied by some explanation so the
|
||||
submitter has directions to improve their contribution.</para>
|
||||
|
||||
<para>Pull-request reviews should include a list of what has been reviewed in a
|
||||
comment, so other reviewers and mergers can know the state of the
|
||||
review.</para>
|
||||
|
||||
<para>All the review template samples provided in this section are generic and
|
||||
meant as examples. Their usage is optional and the reviewer is free to adapt
|
||||
them to their liking.</para>
|
||||
|
||||
<section><title>Package updates</title>
|
||||
|
||||
<para>A package update is the most trivial and common type of pull-request.
|
||||
These pull-requests mainly consist of updating the version part of the package
|
||||
name and the source hash.</para>
|
||||
<para>It can happen that non-trivial updates include patches or more complex
|
||||
changes.</para>
|
||||
|
||||
<para>Reviewing process:</para>
|
||||
|
||||
<itemizedlist>
|
||||
<listitem><para>Add labels to the pull-request. (Requires commit
|
||||
rights)</para>
|
||||
<itemizedlist>
|
||||
<listitem><para><literal>8.has: package (update)</literal> and any topic
|
||||
label that fits the updated package.</para></listitem>
|
||||
</itemizedlist>
|
||||
</listitem>
|
||||
<listitem><para>Ensure that the package versioning fits the
|
||||
guidelines.</para></listitem>
|
||||
<listitem><para>Ensure that the commit text fits the
|
||||
guidelines.</para></listitem>
|
||||
<listitem><para>Ensure that the package maintainers are notified.</para>
|
||||
<itemizedlist>
|
||||
<listitem><para>mention-bot usually notifies GitHub users based on the
|
||||
submitted changes, but it can happen that it misses some of the
|
||||
package maintainers.</para></listitem>
|
||||
</itemizedlist>
|
||||
</listitem>
|
||||
<listitem><para>Ensure that the meta field contains correct
|
||||
information.</para>
|
||||
<itemizedlist>
|
||||
<listitem><para>License can change with version updates, so it should be
|
||||
checked against the upstream license.</para></listitem>
|
||||
<listitem><para>If the package has no maintainer, a maintainer must be
|
||||
set. This can be the update submitter or a community member that
|
||||
agrees to take maintainership of the package.</para></listitem>
|
||||
</itemizedlist>
|
||||
</listitem>
|
||||
<listitem><para>Ensure that the code contains no typos.</para></listitem>
|
||||
<listitem><para>Building the package locally.</para>
|
||||
<itemizedlist>
|
||||
<listitem><para>Pull-requests are often targeted to the master or staging
|
||||
branch so building the pull-request locally as it is submitted can
|
||||
trigger a large number of source builds.</para>
|
||||
<para>It is possible to rebase the changes on nixos-unstable or
|
||||
nixpkgs-unstable for easier review by running the following commands
|
||||
from a nixpkgs clone.
|
||||
<screen>
|
||||
$ git remote add channels https://github.com/NixOS/nixpkgs-channels.git <co
|
||||
xml:id='reviewing-rebase-1' />
|
||||
$ git fetch channels nixos-unstable <co xml:id='reviewing-rebase-2' />
|
||||
$ git fetch origin pull/PRNUMBER/head <co xml:id='reviewing-rebase-3' />
|
||||
$ git rebase --onto nixos-unstable BASEBRANCH FETCH_HEAD <co
|
||||
xml:id='reviewing-rebase-4' />
|
||||
</screen>
|
||||
<calloutlist>
|
||||
<callout arearefs='reviewing-rebase-1'>
|
||||
<para>This should be done only once to be able to fetch channel
|
||||
branches from the nixpkgs-channels repository.</para>
|
||||
</callout>
|
||||
<callout arearefs='reviewing-rebase-2'>
|
||||
<para>Fetching the nixos-unstable branch.</para>
|
||||
</callout>
|
||||
<callout arearefs='reviewing-rebase-3'>
|
||||
<para>Fetching the pull-request changes, <varname>PRNUMBER</varname>
|
||||
is the number at the end of the pull-request title and
|
||||
<varname>BASEBRANCH</varname> the base branch of the
|
||||
pull-request.</para>
|
||||
</callout>
|
||||
<callout arearefs='reviewing-rebase-3'>
|
||||
<para>Rebasing the pull-request changes to the nixos-unstable
|
||||
branch.</para>
|
||||
</callout>
|
||||
</calloutlist>
|
||||
</para>
|
||||
</listitem>
|
||||
<listitem>
|
||||
<para>The <link xlink:href="https://github.com/madjar/nox">nox</link>
|
||||
tool can be used to review a pull-request's content in a single command.
|
||||
It doesn't rebase on a channel branch so it might trigger multiple
|
||||
source builds. <varname>PRNUMBER</varname> should be replaced by the
|
||||
number at the end of the pull-request title.</para>
|
||||
<screen>
|
||||
$ nix-shell -p nox --run "nox-review -k pr PRNUMBER"
|
||||
</screen>
|
||||
</listitem>
|
||||
</itemizedlist>
|
||||
</listitem>
|
||||
<listitem><para>Running every binary.</para></listitem>
|
||||
</itemizedlist>
|
||||
|
||||
<example><title>Sample template for a package update review</title>
|
||||
<screen>
|
||||
##### Reviewed points
|
||||
|
||||
- [ ] package name fits guidelines
|
||||
- [ ] package version fits guidelines
|
||||
- [ ] package build on ARCHITECTURE
|
||||
- [ ] executables tested on ARCHITECTURE
|
||||
- [ ] all depending packages build
|
||||
|
||||
##### Possible improvements
|
||||
|
||||
##### Comments
|
||||
|
||||
</screen></example>
|
||||
</section>
|
||||
|
||||
<section><title>New packages</title>
|
||||
|
||||
<para>New packages are a common type of pull-request. These pull requests
|
||||
consist of adding a new Nix expression for a package.</para>
|
||||
|
||||
<para>Reviewing process:</para>
|
||||
|
||||
<itemizedlist>
|
||||
<listitem><para>Add labels to the pull-request. (Requires commit
|
||||
rights)</para>
|
||||
<itemizedlist>
|
||||
<listitem><para><literal>8.has: package (new)</literal> and any topic
|
||||
label that fits the new package.</para></listitem>
|
||||
</itemizedlist>
|
||||
</listitem>
|
||||
<listitem><para>Ensure that the package versioning fits the
|
||||
guidelines.</para></listitem>
|
||||
<listitem><para>Ensure that the commit name fits the
|
||||
guidelines.</para></listitem>
|
||||
<listitem><para>Ensure that the meta field contains correct
|
||||
information.</para>
|
||||
<itemizedlist>
|
||||
<listitem><para>The license must be checked against the upstream
|
||||
license.</para></listitem>
|
||||
<listitem><para>Platforms should be set or the package will not get binary
|
||||
substitutes.</para></listitem>
|
||||
<listitem><para>A maintainer must be set; this can be the package
|
||||
submitter or a community member who agrees to take maintainership of
|
||||
the package.</para></listitem>
|
||||
</itemizedlist>
|
||||
</listitem>
|
||||
<listitem><para>Ensure that the code contains no typos.</para></listitem>
|
||||
<listitem><para>Ensure the package source.</para>
|
||||
<itemizedlist>
|
||||
<listitem><para>Mirror URLs should be used when
|
||||
available.</para></listitem>
|
||||
<listitem><para>The most appropriate function should be used (e.g.
|
||||
packages from GitHub should use
|
||||
<literal>fetchFromGitHub</literal>).</para></listitem>
|
||||
</itemizedlist>
|
||||
</listitem>
|
||||
<listitem><para>Building the package locally.</para></listitem>
|
||||
<listitem><para>Running every binary.</para></listitem>
|
||||
</itemizedlist>
|
||||
|
||||
<example><title>Sample template for a new package review</title>
|
||||
<screen>
|
||||
##### Reviewed points
|
||||
|
||||
- [ ] package path fits guidelines
|
||||
- [ ] package name fits guidelines
|
||||
- [ ] package version fits guidelines
|
||||
- [ ] package build on ARCHITECTURE
|
||||
- [ ] executables tested on ARCHITECTURE
|
||||
- [ ] `meta.description` is set and fits guidelines
|
||||
- [ ] `meta.license` fits upstream license
|
||||
- [ ] `meta.platforms` is set
|
||||
- [ ] `meta.maintainers` is set
|
||||
- [ ] build time only dependencies are declared in `nativeBuildInputs`
|
||||
- [ ] source is fetched using the appropriate function
|
||||
- [ ] phases are respected
|
||||
- [ ] patches that are remotely available are fetched with `fetchpatch`
|
||||
|
||||
##### Possible improvements
|
||||
|
||||
##### Comments
|
||||
|
||||
</screen></example>
|
||||
</section>
|
||||
|
||||
<section><title>Module updates</title>
|
||||
|
||||
<para>Module updates are submissions changing modules in some ways. These often
|
||||
contain changes to the options or introduce new options.</para>
|
||||
|
||||
<para>Reviewing process</para>
|
||||
|
||||
<itemizedlist>
|
||||
<listitem><para>Add labels to the pull-request. (Requires commit
|
||||
rights)</para>
|
||||
<itemizedlist>
|
||||
<listitem><para><literal>8.has: module (update)</literal> and any topic
|
||||
label that fits the module.</para></listitem>
|
||||
</itemizedlist>
|
||||
</listitem>
|
||||
<listitem><para>Ensure that the module maintainers are notified.</para>
|
||||
<itemizedlist>
|
||||
<listitem><para>Mention-bot notifies GitHub users based on the submitted
|
||||
changes, but it can happen that it misses some of the package
|
||||
maintainers.</para></listitem>
|
||||
</itemizedlist>
|
||||
</listitem>
|
||||
<listitem><para>Ensure that the module tests, if any, are
|
||||
succeeding.</para></listitem>
|
||||
<listitem><para>Ensure that the introduced options are correct.</para>
|
||||
<itemizedlist>
|
||||
<listitem><para>Type should be appropriate (string-related types differ
|
||||
in their merging capabilities, <literal>optionSet</literal> and
|
||||
<literal>string</literal> types are deprecated).</para></listitem>
|
||||
<listitem><para>Description, default and example should be
|
||||
provided.</para></listitem>
|
||||
</itemizedlist>
|
||||
</listitem>
|
||||
<listitem><para>Ensure that option changes are backward compatible.</para>
|
||||
<itemizedlist>
|
||||
<listitem><para><literal>mkRenamedOptionModule</literal> and
|
||||
<literal>mkAliasOptionModule</literal> functions provide a way to make
|
||||
option changes backward compatible (see the sketch after this list).</para></listitem>
|
||||
</itemizedlist>
|
||||
</listitem>
|
||||
<listitem><para>Ensure that removed options are declared with
|
||||
<literal>mkRemovedOptionModule</literal></para></listitem>
|
||||
<listitem><para>Ensure that changes that are not backward compatible are
|
||||
mentioned in release notes.</para></listitem>
|
||||
<listitem><para>Ensure that documentation affected by the change is
|
||||
updated.</para></listitem>
|
||||
</itemizedlist>
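<para>As a sketch of keeping a renamed option backward compatible (the option
path is purely illustrative):
<programlisting>
{ lib, ... }:

{
  imports = [
    # the old name "services.foo.user" keeps working and maps to the new option
    (lib.mkRenamedOptionModule [ "services" "foo" "user" ] [ "services" "foo" "runAsUser" ])
  ];
}
</programlisting>
</para>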
|
||||
|
||||
<example><title>Sample template for a module update review</title>
|
||||
<screen>
|
||||
##### Reviewed points
|
||||
|
||||
- [ ] changes are backward compatible
|
||||
- [ ] removed options are declared with `mkRemovedOptionModule`
|
||||
- [ ] changes that are not backward compatible are documented in release notes
|
||||
- [ ] module tests succeed on ARCHITECTURE
|
||||
- [ ] options types are appropriate
|
||||
- [ ] options description is set
|
||||
- [ ] options example is provided
|
||||
- [ ] documentation affected by the changes is updated
|
||||
|
||||
##### Possible improvements
|
||||
|
||||
##### Comments
|
||||
|
||||
</screen></example>
|
||||
</section>
|
||||
|
||||
<section><title>New modules</title>
|
||||
|
||||
<para>New module submissions introduce a new module to NixOS.</para>
|
||||
|
||||
<itemizedlist>
|
||||
<listitem><para>Add labels to the pull-request. (Requires commit
|
||||
rights)</para>
|
||||
<itemizedlist>
|
||||
<listitem><para><literal>8.has: module (new)</literal> and any topic label
|
||||
that fits the module.</para></listitem>
|
||||
</itemizedlist>
|
||||
</listitem>
|
||||
<listitem><para>Ensure that the module tests, if any, are
|
||||
succeeding.</para></listitem>
|
||||
<listitem><para>Ensure that the introduced options are correct.</para>
|
||||
<itemizedlist>
|
||||
<listitem><para>Type should be appropriate (string-related types differ
|
||||
in their merging capabilities, <literal>optionSet</literal> and
|
||||
<literal>string</literal> types are deprecated).</para></listitem>
|
||||
<listitem><para>Description, default and example should be
|
||||
provided.</para></listitem>
|
||||
</itemizedlist>
|
||||
</listitem>
|
||||
<listitem><para>Ensure that the module <literal>meta</literal> field is
|
||||
present</para>
|
||||
<itemizedlist>
|
||||
<listitem><para>Maintainers should be declared in
|
||||
<literal>meta.maintainers</literal>.</para></listitem>
|
||||
<listitem><para>Module documentation should be declared with
|
||||
<literal>meta.doc</literal>.</para></listitem>
|
||||
</itemizedlist>
|
||||
</listitem>
|
||||
<listitem><para>Ensure that the module respects other modules'
|
||||
functionality.</para>
|
||||
<itemizedlist>
|
||||
<listitem><para>For example, enabling a module should not open firewall
|
||||
ports by default.</para></listitem>
|
||||
</itemizedlist>
|
||||
</listitem>
|
||||
</itemizedlist>
|
||||
|
||||
<example><title>Sample template for a new module review</title>
|
||||
<screen>
|
||||
##### Reviewed points
|
||||
|
||||
- [ ] module path fits the guidelines
|
||||
- [ ] module tests succeed on ARCHITECTURE
|
||||
- [ ] options have appropriate types
|
||||
- [ ] options have default
|
||||
- [ ] options have example
|
||||
- [ ] options have descriptions
|
||||
- [ ] No unneeded package is added to system.environmentPackages
|
||||
- [ ] meta.maintainers is set
|
||||
- [ ] module documentation is declared in meta.doc
|
||||
|
||||
##### Possible improvements
|
||||
|
||||
##### Comments
|
||||
|
||||
</screen></example>
|
||||
</section>
|
||||
|
||||
<section><title>Other submissions</title>
|
||||
|
||||
<para>Other types of submissions require different reviewing steps.</para>
|
||||
|
||||
<para>If you consider yourself to have enough knowledge and experience in a topic and
|
||||
would like to be a long-term reviewer for related submissions, please contact
|
||||
the current reviewers for that topic. They will give you information about the
|
||||
reviewing process.
|
||||
The main reviewers for a topic can be hard to find as there is no list, but
|
||||
checking past pull-requests to see who reviewed or git-blaming the code to see
|
||||
who committed to that topic can give some hints.</para>
|
||||
|
||||
<para>Container system, boot system and library changes are some examples of the
|
||||
pull requests fitting this category.</para>
|
||||
|
||||
</section>
|
||||
|
||||
<section><title>Merging pull-requests</title>
|
||||
|
||||
<para>It is possible for community members that have enough knowledge and
|
||||
experience on a special topic to contribute by merging pull requests.</para>
|
||||
|
||||
<para>TODO: add the procedure to request merging rights.</para>
|
||||
|
||||
<!--
|
||||
The following paragraph about how to deal with inactive contributors is just a
|
||||
proposition and should be modified to what the community agrees to be the right
|
||||
policy.
|
||||
|
||||
<para>Please note that contributors with commit rights inactive for more than
|
||||
three months will have their commit rights revoked.</para>
|
||||
-->
|
||||
|
||||
<para>In case a contributor definitively leaves the Nix community, they should
|
||||
create an issue or notify the mailing list with references to the packages and
|
||||
modules they maintain so that maintainership can be taken over by other
|
||||
contributors.</para>
|
||||
|
||||
</section>
|
||||
</chapter>
|
||||
465
doc/stdenv.xml
465
doc/stdenv.xml
@@ -15,8 +15,7 @@ environment does everything automatically. If
|
||||
can easily customise or override the various build phases.</para>
|
||||
|
||||
|
||||
<section xml:id="sec-using-stdenv"><title>Using
|
||||
<literal>stdenv</literal></title>
|
||||
<section><title>Using <literal>stdenv</literal></title>
|
||||
|
||||
<para>To build a package with the standard environment, you use the
|
||||
function <varname>stdenv.mkDerivation</varname>, instead of the
|
||||
@@ -27,7 +26,7 @@ stdenv.mkDerivation {
|
||||
name = "libfoo-1.2.3";
|
||||
src = fetchurl {
|
||||
url = http://example.org/libfoo-1.2.3.tar.bz2;
|
||||
sha256 = "0x2g1jqygyr5wiwg4ma1nd7w4ydpy82z9gkcv8vh2v8dn3y58v5m";
|
||||
md5 = "e1ec107956b6ddcb0b8b0679367e9ac9";
|
||||
};
|
||||
}</programlisting>
|
||||
|
||||
@@ -59,7 +58,7 @@ build. To make this easier, the standard environment breaks the
|
||||
package build into a number of <emphasis>phases</emphasis>, all of
|
||||
which can be overridden or modified individually: unpacking the
|
||||
sources, applying patches, configuring, building, and installing.
|
||||
(There are some others; see <xref linkend="sec-stdenv-phases"/>.)
|
||||
(There are some others; see <xref linkend="ssec-stdenv-phases"/>.)
|
||||
For instance, a package that doesn’t supply a makefile but instead has
|
||||
to be compiled “manually” could be handled like this:
|
||||
|
||||
@@ -125,8 +124,7 @@ genericBuild
|
||||
</section>
|
||||
|
||||
|
||||
<section xml:id="sec-tools-of-stdenv"><title>Tools provided by
|
||||
<literal>stdenv</literal></title>
|
||||
<section><title>Tools provided by <literal>stdenv</literal></title>
|
||||
|
||||
<para>The standard environment provides the following packages:
|
||||
|
||||
@@ -224,67 +222,10 @@ genericBuild
|
||||
|
||||
</variablelist>
|
||||
|
||||
<variablelist>
|
||||
<title>Variables affecting build properties</title>
|
||||
|
||||
<varlistentry>
|
||||
<term><varname>enableParallelBuilding</varname></term>
|
||||
<listitem><para>If set, <literal>stdenv</literal> will pass specific
|
||||
flags to <literal>make</literal> and other build tools to enable
|
||||
parallel building with up to <literal>build-cores</literal>
|
||||
workers.</para></listitem>
|
||||
</varlistentry>
|
||||
|
||||
<varlistentry>
|
||||
<term><varname>preferLocalBuild</varname></term>
|
||||
<listitem><para>If set, specifies that the package is so lightweight
|
||||
in terms of build operations (e.g. write a text file from a Nix string
|
||||
to the store) that there's no need to look for it in binary caches --
|
||||
it's faster to just build it locally. It also tells Hydra and other
|
||||
facilities that this package doesn't need to be exported in binary
|
||||
caches (no one would use it, after all).</para></listitem>
|
||||
</varlistentry>
|
||||
|
||||
</variablelist>
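<para>A short sketch showing both attributes in use (the package itself is
hypothetical):
<programlisting>
stdenv.mkDerivation {
  name = "tiny-helper-0.1";
  src = ./tiny-helper-0.1.tar.gz;
  enableParallelBuilding = true;  # let make use several build cores
  preferLocalBuild = true;        # cheap to build, so skip binary caches
}
</programlisting>
</para>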
|
||||
|
||||
<variablelist>
|
||||
<title>Special variables</title>
|
||||
|
||||
<varlistentry>
|
||||
<term><varname>passthru</varname></term>
|
||||
<listitem><para>This is an attribute set which can be filled with arbitrary
|
||||
values. For example:
|
||||
|
||||
<programlisting>
|
||||
passthru = {
|
||||
foo = "bar";
|
||||
baz = {
|
||||
value1 = 4;
|
||||
value2 = 5;
|
||||
};
|
||||
}
|
||||
</programlisting>
|
||||
|
||||
</para>
|
||||
|
||||
<para>Values inside it are not passed to the builder, so you can change
|
||||
them without triggering a rebuild. However, they can be accessed outside of a
|
||||
derivation directly, as if they were set inside a derivation itself, e.g.
|
||||
<literal>hello.baz.value1</literal>. We don't specify any usage or
|
||||
schema of <literal>passthru</literal> - it is meant for values that would be
|
||||
useful outside the derivation in other parts of a Nix expression (e.g. in other
|
||||
derivations). An example would be to convey some specific dependency of your
|
||||
derivation which contains a program with plugins support. Later, others who
|
||||
make derivations with plugins can use the passed-through dependency to ensure that
|
||||
their plugin is binary-compatible with the built program (see the sketch after this variable list).</para></listitem>
|
||||
</varlistentry>
|
||||
|
||||
</variablelist>
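<para>A brief sketch of such a use (it reuses the <literal>passthru</literal>
example above as if it were attached to <literal>hello</literal>; the consuming
package is hypothetical):
<programlisting>
{ stdenv, hello }:

stdenv.mkDerivation {
  name = "hello-plugin-0.1";         # hypothetical plugin package
  src = ./hello-plugin-0.1.tar.gz;
  # read a value passed through by the host program
  configureFlags = [ "--with-host-abi=${toString hello.baz.value1}" ];
}
</programlisting>
</para>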
|
||||
|
||||
</section>
|
||||
|
||||
|
||||
<section xml:id="sec-stdenv-phases"><title>Phases</title>
|
||||
<section xml:id="ssec-stdenv-phases"><title>Phases</title>
|
||||
|
||||
<para>The generic builder has a number of <emphasis>phases</emphasis>.
|
||||
Package builds are split into phases to make it easier to override
|
||||
@@ -302,8 +243,7 @@ is convenient to override a phase from the derivation, while the
|
||||
latter is convenient from a build script.</para>
|
||||
|
||||
|
||||
<section xml:id="ssec-controlling-phases"><title>Controlling
|
||||
phases</title>
|
||||
<section><title>Controlling phases</title>
|
||||
|
||||
<para>There are a number of variables that control what phases are
|
||||
executed and in what order:
|
||||
@@ -387,7 +327,7 @@ executed and in what order:
|
||||
</section>
|
||||
|
||||
|
||||
<section xml:id="ssec-unpack-phase"><title>The unpack phase</title>
|
||||
<section><title>The unpack phase</title>
|
||||
|
||||
<para>The unpack phase is responsible for unpacking the source code of
|
||||
the package. The default implementation of
|
||||
@@ -494,7 +434,7 @@ Additional file types can be supported by setting the
|
||||
</section>
|
||||
|
||||
|
||||
<section xml:id="ssec-patch-phase"><title>The patch phase</title>
|
||||
<section><title>The patch phase</title>
|
||||
|
||||
<para>The patch phase applies the list of patches defined in the
|
||||
<varname>patches</varname> variable.</para>
|
||||
@@ -537,7 +477,7 @@ Additional file types can be supported by setting the
|
||||
</section>
|
||||
|
||||
|
||||
<section xml:id="ssec-configure-phase"><title>The configure phase</title>
|
||||
<section><title>The configure phase</title>
|
||||
|
||||
<para>The configure phase prepares the source tree for building. The
|
||||
default <function>configurePhase</function> runs
|
||||
@@ -557,8 +497,8 @@ script) if it exists.</para>
|
||||
|
||||
<varlistentry>
|
||||
<term><varname>configureFlags</varname></term>
|
||||
<listitem><para>A list of strings passed as additional arguments to the
|
||||
configure script.</para></listitem>
|
||||
<listitem><para>Additional arguments passed to the configure
|
||||
script.</para></listitem>
|
||||
</varlistentry>
|
||||
|
||||
<varlistentry>
|
||||
@@ -573,8 +513,8 @@ script) if it exists.</para>
|
||||
<term><varname>dontAddPrefix</varname></term>
|
||||
<listitem><para>By default, the flag
|
||||
<literal>--prefix=$prefix</literal> is added to the configure
|
||||
flags. If this is undesirable, set this variable to
|
||||
true.</para></listitem>
|
||||
flags. If this is undesirable, set this variable to a non-empty
|
||||
value.</para></listitem>
|
||||
</varlistentry>
|
||||
|
||||
<varlistentry>
|
||||
@@ -590,7 +530,8 @@ script) if it exists.</para>
|
||||
<listitem><para>By default, the flag
|
||||
<literal>--disable-dependency-tracking</literal> is added to the
|
||||
configure flags to speed up Automake-based builds. If this is
|
||||
undesirable, set this variable to true.</para></listitem>
|
||||
undesirable, set this variable to a non-empty
|
||||
value.</para></listitem>
|
||||
</varlistentry>
|
||||
|
||||
<varlistentry>
|
||||
@@ -603,16 +544,7 @@ script) if it exists.</para>
|
||||
variables in the Libtool script to prevent Libtool from using
|
||||
libraries in <filename>/usr/lib</filename> and
|
||||
such.</para></footnote>. If this is undesirable, set this
|
||||
variable to true.</para></listitem>
|
||||
</varlistentry>
|
||||
|
||||
<varlistentry>
|
||||
<term><varname>dontDisableStatic</varname></term>
|
||||
<listitem><para>By default, when the configure script has
|
||||
<option>--enable-static</option>, the option
|
||||
<option>--disable-static</option> is added to the configure flags.</para>
|
||||
<para>If this is undesirable, set this variable to
|
||||
true.</para></listitem>
|
||||
variable to a non-empty value.</para></listitem>
|
||||
</varlistentry>
|
||||
|
||||
<varlistentry>
|
||||
@@ -633,7 +565,7 @@ script) if it exists.</para>
|
||||
</section>
|
||||
|
||||
|
||||
<section xml:id="build-phase"><title>The build phase</title>
|
||||
<section><title>The build phase</title>
|
||||
|
||||
<para>The build phase is responsible for actually building the package
|
||||
(e.g. compiling it). The default <function>buildPhase</function>
|
||||
@@ -658,7 +590,7 @@ nothing.</para>
|
||||
|
||||
<varlistentry>
|
||||
<term><varname>makeFlags</varname></term>
|
||||
<listitem><para>A list of strings passed as additional flags to
|
||||
<listitem><para>Additional flags passed to
|
||||
<command>make</command>. These flags are also used by the default
|
||||
install and check phase. For setting make flags specific to the
|
||||
build phase, use <varname>buildFlags</varname> (see
|
||||
@@ -685,7 +617,7 @@ makeFlagsArray=(CFLAGS="-O0 -g" LDFLAGS="-lfoo -lbar")
|
||||
|
||||
<varlistentry>
|
||||
<term><varname>buildFlags</varname> / <varname>buildFlagsArray</varname></term>
|
||||
<listitem><para>A list of strings passed as additional flags to
|
||||
<listitem><para>Additional flags passed to
|
||||
<command>make</command>. Like <varname>makeFlags</varname> and
|
||||
<varname>makeFlagsArray</varname>, but only used by the build
|
||||
phase.</para></listitem>
|
||||
@@ -717,7 +649,7 @@ called, respectively.</para>
|
||||
</section>
|
||||
|
||||
|
||||
<section xml:id="ssec-check-phase"><title>The check phase</title>
|
||||
<section><title>The check phase</title>
|
||||
|
||||
<para>The check phase checks whether the package was built correctly
|
||||
by running its test suite. The default
|
||||
@@ -753,7 +685,7 @@ doCheck = true;</programlisting>
|
||||
|
||||
<varlistentry>
|
||||
<term><varname>checkFlags</varname> / <varname>checkFlagsArray</varname></term>
|
||||
<listitem><para>A list of strings passed as additional flags to
|
||||
<listitem><para>Additional flags passed to
|
||||
<command>make</command>. Like <varname>makeFlags</varname> and
|
||||
<varname>makeFlagsArray</varname>, but only used by the check
|
||||
phase.</para></listitem>
|
||||
@@ -777,7 +709,7 @@ doCheck = true;</programlisting>
|
||||
</section>
|
||||
|
||||
|
||||
<section xml:id="ssec-install-phase"><title>The install phase</title>
|
||||
<section><title>The install phase</title>
|
||||
|
||||
<para>The install phase is responsible for installing the package in
|
||||
the Nix store under <envar>out</envar>. The default
|
||||
@@ -786,7 +718,7 @@ the Nix store under <envar>out</envar>. The default
|
||||
install</command>.</para>
|
||||
|
||||
<variablelist>
|
||||
<title>Variables controlling the install phase</title>
|
||||
<title>Variables controlling the check phase</title>
|
||||
|
||||
<varlistentry>
|
||||
<term><varname>makeFlags</varname> /
|
||||
@@ -808,7 +740,7 @@ installTargets = "install-bin install-doc";</programlisting>
|
||||
|
||||
<varlistentry>
|
||||
<term><varname>installFlags</varname> / <varname>installFlagsArray</varname></term>
|
||||
<listitem><para>A list of strings passed as additional flags to
|
||||
<listitem><para>Additional flags passed to
|
||||
<command>make</command>. Like <varname>makeFlags</varname> and
|
||||
<varname>makeFlagsArray</varname>, but only used by the install
|
||||
phase.</para></listitem>
|
||||
@@ -832,7 +764,7 @@ installTargets = "install-bin install-doc";</programlisting>
|
||||
</section>
|
||||
|
||||
|
||||
<section xml:id="ssec-fixup-phase"><title>The fixup phase</title>
|
||||
<section><title>The fixup phase</title>
|
||||
|
||||
<para>The fixup phase performs some (Nix-specific) post-processing
|
||||
actions on the files installed under <filename>$out</filename> by the
|
||||
@@ -865,7 +797,7 @@ following:
|
||||
</para>
|
||||
|
||||
<variablelist>
|
||||
<title>Variables controlling the fixup phase</title>
|
||||
<title>Variables controlling the check phase</title>
|
||||
|
||||
<varlistentry>
|
||||
<term><varname>dontStrip</varname></term>
|
||||
@@ -873,12 +805,6 @@ following:
|
||||
stripped. By default, they are.</para></listitem>
|
||||
</varlistentry>
|
||||
|
||||
<varlistentry>
|
||||
<term><varname>dontMoveSbin</varname></term>
|
||||
<listitem><para>If set, files in <filename>$out/sbin</filename> are not moved
|
||||
to <filename>$out/bin</filename>. By default, they are.</para></listitem>
|
||||
</varlistentry>
|
||||
|
||||
<varlistentry>
|
||||
<term><varname>stripAllList</varname></term>
|
||||
<listitem><para>List of directories to search for libraries and
|
||||
@@ -927,6 +853,12 @@ following:
|
||||
rewritten to paths in the Nix store.</para></listitem>
|
||||
</varlistentry>
|
||||
|
||||
<varlistentry>
|
||||
<term><varname>dontPatchSourceShebangs</varname></term>
|
||||
<listitem><para>Same as <varname>dontPatchShebangs</varname>, but applied
|
||||
to the source code before configurePhase.</para></listitem>
|
||||
</varlistentry>
|
||||
|
||||
<varlistentry>
|
||||
<term><varname>forceShare</varname></term>
|
||||
<listitem><para>The list of directories that must be moved from
|
||||
@@ -956,76 +888,12 @@ following:
|
||||
phase.</para></listitem>
|
||||
</varlistentry>
|
||||
|
||||
<varlistentry xml:id="stdenv-separateDebugInfo">
|
||||
<term><varname>separateDebugInfo</varname></term>
|
||||
<listitem><para>If set to <literal>true</literal>, the standard
|
||||
environment will enable debug information in C/C++ builds. After
|
||||
installation, the debug information will be separated from the
|
||||
executables and stored in the output named
|
||||
<literal>debug</literal>. (This output is enabled automatically;
|
||||
you don’t need to set the <varname>outputs</varname> attribute
|
||||
explicitly.) To be precise, the debug information is stored in
|
||||
<filename><replaceable>debug</replaceable>/lib/debug/.build-id/<replaceable>XX</replaceable>/<replaceable>YYYY…</replaceable></filename>,
|
||||
where <replaceable>XXYYYY…</replaceable> is the <replaceable>build
|
||||
ID</replaceable> of the binary — a SHA-1 hash of the contents of
|
||||
the binary. Debuggers like GDB use the build ID to look up the
|
||||
separated debug information.</para>
|
||||
|
||||
<para>For example, with GDB, you can add
|
||||
|
||||
<programlisting>
|
||||
set debug-file-directory ~/.nix-profile/lib/debug
|
||||
</programlisting>
|
||||
|
||||
to <filename>~/.gdbinit</filename>. GDB will then be able to find
|
||||
debug information installed via <literal>nix-env
|
||||
-i</literal>.</para>
|
||||
|
||||
</listitem>
|
||||
</varlistentry>
|
||||
|
||||
</variablelist>
|
||||
|
||||
</section>
|
||||
|
||||
<section xml:id="ssec-installCheck-phase"><title>The installCheck phase</title>
|
||||
|
||||
<para>The installCheck phase checks whether the package was installed
|
||||
correctly by running its test suite against the installed directories.
|
||||
The default <function>installCheck</function> calls <command>make
|
||||
installcheck</command>.</para>
|
||||
|
||||
<variablelist>
|
||||
<title>Variables controlling the installCheck phase</title>
|
||||
|
||||
<varlistentry>
|
||||
<term><varname>doInstallCheck</varname></term>
|
||||
<listitem><para>If set to a non-empty string, the installCheck phase is
|
||||
executed, otherwise it is skipped (default). Thus you should set
|
||||
|
||||
<programlisting>doInstallCheck = true;</programlisting>
|
||||
|
||||
in the derivation to enable install checks.</para></listitem>
|
||||
</varlistentry>
|
||||
|
||||
<varlistentry>
|
||||
<term><varname>preInstallCheck</varname></term>
|
||||
<listitem><para>Hook executed at the start of the installCheck
|
||||
phase.</para></listitem>
|
||||
</varlistentry>
|
||||
|
||||
<varlistentry>
|
||||
<term><varname>postInstallCheck</varname></term>
|
||||
<listitem><para>Hook executed at the end of the installCheck
|
||||
phase.</para></listitem>
|
||||
</varlistentry>
|
||||
|
||||
</variablelist>
|
||||
|
||||
</section>
|
||||
|
||||
<section xml:id="ssec-distribution-phase"><title>The distribution
|
||||
phase</title>
|
||||
<section><title>The distribution phase</title>
|
||||
|
||||
<para>The distribution phase is intended to produce a source
|
||||
distribution of the package. The default
|
||||
@@ -1204,17 +1072,7 @@ PATH=/nix/store/68afga4khv0w...-coreutils-6.12/bin
|
||||
echo @foo@
|
||||
</programlisting>
|
||||
|
||||
That is, no substitution is performed for undefined variables.</para>
|
||||
|
||||
<para>Environment variables that start with an uppercase letter or an
|
||||
underscore are filtered out,
|
||||
to prevent global variables (like <literal>HOME</literal>) or private
|
||||
variables (like <literal>__ETC_PROFILE_DONE</literal>) from accidentally
|
||||
getting substituted.
|
||||
The variables also have to be valid bash “names”, as
|
||||
defined in the bash manpage (alphanumeric or <literal>_</literal>,
|
||||
must not start with a number).</para>
|
||||
</listitem>
|
||||
That is, no substitution is performed for undefined variables.</para></listitem>
|
||||
</varlistentry>
|
||||
|
||||
|
||||
@@ -1231,23 +1089,10 @@ echo @foo@
|
||||
<term><function>stripHash</function>
|
||||
<replaceable>path</replaceable></term>
|
||||
<listitem><para>Strips the directory and hash part of a store
|
||||
path, outputting the name part to <literal>stdout</literal>.
|
||||
For example:
|
||||
|
||||
<programlisting>
|
||||
# prints coreutils-8.24
|
||||
stripHash "/nix/store/9s9r019176g7cvn2nvcw41gsp862y6b4-coreutils-8.24"
|
||||
</programlisting>
|
||||
|
||||
If you wish to store the result in another variable, then the
|
||||
following idiom may be useful:
|
||||
|
||||
<programlisting>
|
||||
name="/nix/store/9s9r019176g7cvn2nvcw41gsp862y6b4-coreutils-8.24"
|
||||
someVar=$(stripHash $name)
|
||||
</programlisting>
|
||||
|
||||
</para></listitem>
|
||||
path, and prints (on standard output) only the name part. For
|
||||
instance, <literal>stripHash
|
||||
/nix/store/68afga4khv0w...-coreutils-6.12</literal> prints
|
||||
<literal>coreutils-6.12</literal>.</para></listitem>
|
||||
</varlistentry>
|
||||
|
||||
|
||||
@@ -1301,14 +1146,6 @@ someVar=$(stripHash $name)
|
||||
environment variable.</para></listitem>
|
||||
</varlistentry>
|
||||
|
||||
<varlistentry>
|
||||
<term>Autoconf</term>
|
||||
<listitem><para>The <varname>autoreconfHook</varname> derivation adds
|
||||
<varname>autoreconfPhase</varname>, which runs autoreconf, libtoolize and
|
||||
automake, essentially preparing the configure script in autotools-based
|
||||
builds.</para></listitem>
|
||||
</varlistentry>
|
||||
|
||||
<varlistentry>
|
||||
<term>libxml2</term>
|
||||
<listitem><para>Adds every file named
|
||||
@@ -1327,7 +1164,7 @@ someVar=$(stripHash $name)
|
||||
</varlistentry>
|
||||
|
||||
<varlistentry>
|
||||
<term>Qt 4</term>
|
||||
<term>Qt</term>
|
||||
<listitem><para>Sets the <envar>QTDIR</envar> environment variable
|
||||
to Qt’s path.</para></listitem>
|
||||
</varlistentry>
|
||||
@@ -1353,25 +1190,6 @@ someVar=$(stripHash $name)
|
||||
<envar>GST_PLUGIN_SYSTEM_PATH</envar> environment variable.</para></listitem>
|
||||
</varlistentry>
|
||||
|
||||
<varlistentry>
|
||||
<term>paxctl</term>
|
||||
<listitem><para>Defines the <varname>paxmark</varname> helper for
|
||||
setting per-executable PaX flags on Linux (where it is available by
|
||||
default; on all other platforms, <varname>paxmark</varname> is a no-op).
|
||||
For example, to disable secure memory protections on the executable
|
||||
<replaceable>foo</replaceable>:
|
||||
<programlisting>
|
||||
postFixup = ''
|
||||
paxmark m $out/bin/<replaceable>foo</replaceable>
|
||||
'';
|
||||
</programlisting>
|
||||
The <literal>m</literal> flag is the most common flag and is typically
|
||||
required for applications that employ JIT compilation or otherwise need to
|
||||
execute code generated at run-time. Disabling PaX protections should be
|
||||
considered a last resort: if possible, problematic features should be
|
||||
disabled or patched to work with PaX.</para></listitem>
|
||||
</varlistentry>
|
||||
|
||||
</variablelist>
|
||||
|
||||
</para>
|
||||
@@ -1379,7 +1197,7 @@ someVar=$(stripHash $name)
|
||||
</section>
|
||||
|
||||
|
||||
<section xml:id="sec-purity-in-nixpkgs"><title>Purity in Nixpkgs</title>
|
||||
<section><title>Purity in Nixpkgs</title>
|
||||
|
||||
<para>[measures taken to prevent dependencies on packages outside the
|
||||
store, and what you can do to prevent them]</para>
|
||||
@@ -1394,209 +1212,6 @@ in the default system locations.</para>
|
||||
|
||||
</section>
|
||||
|
||||
<section xml:id="sec-hardening-in-nixpkgs"><title>Hardening in Nixpkgs</title>
|
||||
|
||||
<para>There are flags available to harden packages at compile or link-time.
|
||||
These can be toggled using the <varname>stdenv.mkDerivation</varname> parameters
|
||||
<varname>hardeningDisable</varname> and <varname>hardeningEnable</varname>.
|
||||
</para>
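<para>As an illustration (a minimal sketch, not taken from an actual package
expression), a package that fails to build with one of the default flags can
disable just that flag in its derivation:</para>

<programlisting>
stdenv.mkDerivation {
  name = "example-1.0";            # hypothetical package
  src = ./example-1.0.tar.gz;      # hypothetical source
  # Disable only the "format" hardening flag; all other defaults stay enabled.
  hardeningDisable = [ "format" ];
}
</programlisting>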
|
||||
|
||||
<para>The following flags are enabled by default and might require disabling
|
||||
if the program to package is incompatible.
|
||||
</para>
|
||||
|
||||
<variablelist>
|
||||
|
||||
<varlistentry>
|
||||
<term><varname>format</varname></term>
|
||||
<listitem><para>Adds the <option>-Wformat -Wformat-security
|
||||
-Werror=format-security</option> compiler options. At present,
|
||||
this warns about calls to <varname>printf</varname> and
|
||||
<varname>scanf</varname> functions where the format string is
|
||||
not a string literal and there are no format arguments, as in
|
||||
<literal>printf(foo);</literal>. This may be a security hole
|
||||
if the format string came from untrusted input and contains
|
||||
<literal>%n</literal>.</para>
|
||||
|
||||
<para>This needs to be turned off or fixed for errors similar to:</para>
|
||||
|
||||
<programlisting>
|
||||
/tmp/nix-build-zynaddsubfx-2.5.2.drv-0/zynaddsubfx-2.5.2/src/UI/guimain.cpp:571:28: error: format not a string literal and no format arguments [-Werror=format-security]
|
||||
printf(help_message);
|
||||
^
|
||||
cc1plus: some warnings being treated as errors
|
||||
</programlisting></listitem>
|
||||
</varlistentry>
|
||||
|
||||
<varlistentry>
|
||||
<term><varname>stackprotector</varname></term>
|
||||
<listitem>
|
||||
<para>Adds the <option>-fstack-protector-strong
|
||||
--param ssp-buffer-size=4</option>
|
||||
compiler options. This adds safety checks against stack overwrites
|
||||
rendering many potential code injection attacks into aborting situations.
|
||||
In the best case this turns code injection vulnerabilities into denial
|
||||
of service or into non-issues (depending on the application).</para>
|
||||
|
||||
<para>This needs to be turned off or fixed for errors similar to:</para>
|
||||
|
||||
<programlisting>
|
||||
bin/blib.a(bios_console.o): In function `bios_handle_cup':
|
||||
/tmp/nix-build-ipxe-20141124-5cbdc41.drv-0/ipxe-5cbdc41/src/arch/i386/firmware/pcbios/bios_console.c:86: undefined reference to `__stack_chk_fail'
|
||||
</programlisting></listitem>
|
||||
</varlistentry>
|
||||
|
||||
<varlistentry>
|
||||
<term><varname>fortify</varname></term>
|
||||
<listitem>
|
||||
<para>Adds the <option>-O2 -D_FORTIFY_SOURCE=2</option> compiler
|
||||
options. During code generation the compiler knows a great deal of
|
||||
information about buffer sizes (where possible), and attempts to replace
|
||||
insecure unlimited length buffer function calls with length-limited ones.
|
||||
This is especially useful for old, crufty code. Additionally, format
|
||||
strings in writable memory that contain '%n' are blocked. If an application
|
||||
depends on such a format string, it will need to be worked around.
|
||||
</para>
|
||||
|
||||
<para>Additionally, some warnings are enabled which might trigger build
|
||||
failures if compiler warnings are treated as errors in the package build.
|
||||
In this case, set <option>NIX_CFLAGS_COMPILE</option> to
|
||||
<option>-Wno-error=warning-type</option>.</para>
|
||||
|
||||
<para>This needs to be turned off or fixed for errors similar to:</para>
|
||||
|
||||
<programlisting>
|
||||
malloc.c:404:15: error: return type is an incomplete type
|
||||
malloc.c:410:19: error: storage size of 'ms' isn't known
|
||||
</programlisting>
|
||||
<programlisting>
|
||||
strdup.h:22:1: error: expected identifier or '(' before '__extension__'
|
||||
</programlisting>
|
||||
<programlisting>
|
||||
strsep.c:65:23: error: register name not specified for 'delim'
|
||||
</programlisting>
|
||||
<programlisting>
|
||||
installwatch.c:3751:5: error: conflicting types for '__open_2'
|
||||
</programlisting>
|
||||
<programlisting>
|
||||
fcntl2.h:50:4: error: call to '__open_missing_mode' declared with attribute error: open with O_CREAT or O_TMPFILE in second argument needs 3 arguments
|
||||
</programlisting>
|
||||
</listitem>
|
||||
</varlistentry>
|
||||
|
||||
<varlistentry>
|
||||
<term><varname>pic</varname></term>
|
||||
<listitem>
|
||||
<para>Adds the <option>-fPIC</option> compiler option. This option adds
support for position-independent code in shared libraries and thus makes
ASLR possible.</para>

<para>Most notably, the Linux kernel, kernel modules and other code
not running in an operating system environment, such as boot loaders, won't
build with PIC enabled. The compiler will in most cases complain that
PIC is not supported for a specific build.
|
||||
</para>
|
||||
|
||||
<para>This needs to be turned off or fixed for assembler errors similar to:</para>
|
||||
|
||||
<programlisting>
|
||||
ccbLfRgg.s: Assembler messages:
|
||||
ccbLfRgg.s:33: Error: missing or invalid displacement expression `private_key_len@GOTOFF'
|
||||
</programlisting>
|
||||
</listitem>
|
||||
</varlistentry>
|
||||
|
||||
<varlistentry>
|
||||
<term><varname>strictoverflow</varname></term>
|
||||
<listitem>
|
||||
<para>Signed integer overflow is undefined behaviour according to the C
|
||||
standard. If it happens, it is an error in the program as it should check
|
||||
for overflow before it can happen, not afterwards. GCC provides built-in
|
||||
functions to perform arithmetic with overflow checking, which are correct
|
||||
and faster than any custom implementation. As a workaround, the option
|
||||
<option>-fno-strict-overflow</option> makes gcc behave as if signed
|
||||
integer overflows were defined.
|
||||
</para>
|
||||
|
||||
<para>This flag should not trigger any build or runtime errors.</para>
|
||||
</listitem>
|
||||
</varlistentry>
|
||||
|
||||
<varlistentry>
|
||||
<term><varname>relro</varname></term>
|
||||
<listitem>
|
||||
<para>Adds the <option>-z relro</option> linker option. During program
|
||||
load, several ELF memory sections need to be written to by the linker,
|
||||
but can be turned read-only before turning over control to the program.
|
||||
This prevents some GOT (and .dtors) overwrite attacks, but at least the
|
||||
part of the GOT used by the dynamic linker (.got.plt) is still vulnerable.
|
||||
</para>
|
||||
|
||||
<para>This flag can break dynamic shared object loading. For instance, the
|
||||
module systems of Xorg and OpenCV are incompatible with this flag. In almost
|
||||
all cases the <varname>bindnow</varname> flag must also be disabled and
|
||||
incompatible programs typically fail with similar errors at runtime.</para>
|
||||
</listitem>
|
||||
</varlistentry>
|
||||
|
||||
<varlistentry>
|
||||
<term><varname>bindnow</varname></term>
|
||||
<listitem>
|
||||
<para>Adds the <option>-z bindnow</option> linker option. During program
|
||||
load, all dynamic symbols are resolved, allowing for the complete GOT to
|
||||
be marked read-only (due to <varname>relro</varname>). This prevents GOT
|
||||
overwrite attacks. For very large applications, this can incur some
|
||||
performance loss during initial load while symbols are resolved, but this
|
||||
shouldn't be an issue for daemons.
|
||||
</para>
|
||||
|
||||
<para>This flag can break dynamic shared object loading. For instance, the
|
||||
module systems of Xorg and PHP are incompatible with this flag. Programs
|
||||
incompatible with this flag often fail at runtime due to missing symbols,
|
||||
like:</para>
|
||||
|
||||
<programlisting>
|
||||
intel_drv.so: undefined symbol: vgaHWFreeHWRec
|
||||
</programlisting>
|
||||
</listitem>
|
||||
</varlistentry>
|
||||
|
||||
</variablelist>
|
||||
|
||||
<para>The following flags are disabled by default and should be enabled
|
||||
for packages that take untrusted input, like network services.
|
||||
</para>
|
||||
|
||||
<variablelist>
|
||||
|
||||
<varlistentry>
|
||||
<term><varname>pie</varname></term>
|
||||
<listitem>
|
||||
<para>Adds the <option>-fPIE</option> compiler and <option>-pie</option>
|
||||
linker options. Position Independent Executables are needed to take
|
||||
advantage of Address Space Layout Randomization, supported by modern
|
||||
kernel versions. While ASLR can already be enforced for data areas in
|
||||
the stack and heap (brk and mmap), the code areas must be compiled as
|
||||
position-independent. Shared libraries already do this with the
|
||||
<varname>pic</varname> flag, so they gain ASLR automatically, but binary
|
||||
.text regions need to be built with <varname>pie</varname> to gain ASLR.
|
||||
When this happens, ROP attacks are much harder since there are no static
|
||||
locations to bounce off of during a memory corruption attack.
|
||||
</para>
|
||||
</listitem>
|
||||
</varlistentry>
|
||||
|
||||
</variablelist>
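<para>For instance (a sketch with a hypothetical package name), a network
daemon could enable the <varname>pie</varname> flag described above in its
derivation:</para>

<programlisting>
stdenv.mkDerivation {
  name = "exampled-1.0";           # hypothetical network daemon
  src = ./exampled-1.0.tar.gz;     # hypothetical source
  # Opt in to PIE on top of the default hardening flags.
  hardeningEnable = [ "pie" ];
}
</programlisting>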
|
||||
|
||||
<para>For more in-depth information on these hardening flags and hardening in
|
||||
general, refer to the
|
||||
<link xlink:href="https://wiki.debian.org/Hardening">Debian Wiki</link>,
|
||||
<link xlink:href="https://wiki.ubuntu.com/Security/Features">Ubuntu Wiki</link>,
|
||||
<link xlink:href="https://wiki.gentoo.org/wiki/Project:Hardened">Gentoo Wiki</link>,
|
||||
and the <link xlink:href="https://wiki.archlinux.org/index.php/DeveloperWiki:Security">
|
||||
Arch Wiki</link>.
|
||||
</para>
|
||||
|
||||
</section>
|
||||
|
||||
</chapter>
|
||||
|
||||
|
||||
@@ -1,332 +0,0 @@
|
||||
<chapter xmlns="http://docbook.org/ns/docbook"
|
||||
xmlns:xlink="http://www.w3.org/1999/xlink"
|
||||
xml:id="chap-submitting-changes">
|
||||
|
||||
<title>Submitting changes</title>
|
||||
|
||||
<section>
|
||||
<title>Making patches</title>
|
||||
|
||||
<itemizedlist>
|
||||
<listitem>
|
||||
<para>Read <link xlink:href="https://nixos.org/nixpkgs/manual/">Manual (How to write packages for Nix)</link>.</para>
|
||||
</listitem>
|
||||
|
||||
<listitem>
|
||||
<para>Fork the repository on GitHub.</para>
|
||||
</listitem>
|
||||
|
||||
<listitem>
|
||||
<para>Create a branch for your future fix.
|
||||
|
||||
<itemizedlist>
|
||||
<listitem>
|
||||
<para>You can make a branch from a commit of your local <command>nixos-version</command>. That will help you avoid additional local compilations, because you will receive packages from the binary cache.
|
||||
|
||||
<itemizedlist>
|
||||
<listitem>
|
||||
<para>For example: <command>nixos-version</command> returns <command>15.05.git.0998212 (Dingo)</command>. So you can do:</para>
|
||||
</listitem>
|
||||
</itemizedlist>
|
||||
|
||||
<screen>
|
||||
$ git checkout 0998212
|
||||
$ git checkout -b 'fix/pkg-name-update'
|
||||
</screen>
|
||||
</para>
|
||||
</listitem>
|
||||
|
||||
<listitem>
|
||||
<para>Please avoid working directly on the <command>master</command> branch.</para>
|
||||
</listitem>
|
||||
</itemizedlist>
|
||||
</para>
|
||||
</listitem>
|
||||
|
||||
<listitem>
|
||||
<para>Make commits of logical units.
|
||||
|
||||
<itemizedlist>
|
||||
<listitem>
|
||||
<para>If you removed pkgs, made some major NixOS changes etc., write about them in <command>nixos/doc/manual/release-notes/rl-unstable.xml</command>.</para>
|
||||
</listitem>
|
||||
</itemizedlist>
|
||||
</para>
|
||||
</listitem>
|
||||
|
||||
<listitem>
|
||||
<para>Check for unnecessary whitespace with <command>git diff --check</command> before committing.</para>
|
||||
</listitem>
|
||||
|
||||
<listitem>
|
||||
<para>Format the commit in the following way:</para>
|
||||
<programlisting>
|
||||
(pkg-name | service-name): (from -> to | init at version | refactor | etc)
|
||||
Additional information.
|
||||
</programlisting>
|
||||
|
||||
<itemizedlist>
|
||||
<listitem>
|
||||
<para>Examples:
|
||||
|
||||
<itemizedlist>
|
||||
<listitem>
|
||||
<para>
|
||||
<command>nginx: init at 2.0.1</command>
|
||||
</para>
|
||||
</listitem>
|
||||
|
||||
<listitem>
|
||||
<para>
|
||||
<command>firefox: 3.0 -> 3.1.1</command>
|
||||
</para>
|
||||
</listitem>
|
||||
|
||||
<listitem>
|
||||
<para>
|
||||
<command>hydra service: add bazBaz option</command>
|
||||
</para>
|
||||
</listitem>
|
||||
|
||||
<listitem>
|
||||
<para>
|
||||
<command>nginx service: refactor config generation</command>
|
||||
</para>
|
||||
</listitem>
|
||||
</itemizedlist>
|
||||
</para>
|
||||
</listitem>
|
||||
</itemizedlist>
|
||||
</listitem>
|
||||
|
||||
<listitem>
|
||||
<para>Test your changes. If you work with
|
||||
|
||||
<itemizedlist>
|
||||
<listitem>
|
||||
<para>nixpkgs:
|
||||
|
||||
<itemizedlist>
|
||||
<listitem>
|
||||
<para>update pkg ->
|
||||
|
||||
<itemizedlist>
|
||||
<listitem>
|
||||
<para>
|
||||
<command>nix-env -i pkg-name -f <path to your local nixpkgs folder></command>
|
||||
</para>
|
||||
</listitem>
|
||||
</itemizedlist>
|
||||
</para>
|
||||
</listitem>
|
||||
|
||||
<listitem>
|
||||
<para>add pkg ->
|
||||
|
||||
<itemizedlist>
|
||||
<listitem>
|
||||
<para>Make sure it's in <command>pkgs/top-level/all-packages.nix</command>
|
||||
</para>
|
||||
</listitem>
|
||||
|
||||
<listitem>
|
||||
<para>
|
||||
<command>nix-env -i pkg-name -f <path to your local nixpkgs folder></command>
|
||||
</para>
|
||||
</listitem>
|
||||
</itemizedlist>
|
||||
</para>
|
||||
</listitem>
|
||||
|
||||
<listitem>
|
||||
<para>
|
||||
<emphasis>If you don't want to install pkg in your profile</emphasis>.
|
||||
|
||||
<itemizedlist>
|
||||
<listitem>
|
||||
<para>
|
||||
<command>nix-build -A pkg-attribute-name <path to your local nixpkgs folder>/default.nix</command> and check the results in the folder <command>result</command>. It will appear in the same directory where you ran <command>nix-build</command>.</para>
|
||||
</listitem>
|
||||
</itemizedlist>
|
||||
</para>
|
||||
</listitem>
|
||||
|
||||
<listitem>
|
||||
<para>If you did <command>nix-env -i pkg-name</command> you can do <command>nix-env -e pkg-name</command> to uninstall it from your system.</para>
|
||||
</listitem>
|
||||
</itemizedlist>
|
||||
</para>
|
||||
</listitem>
|
||||
|
||||
<listitem>
|
||||
<para>NixOS and its modules:
|
||||
|
||||
<itemizedlist>
|
||||
<listitem>
|
||||
<para>You can add the new module to your NixOS configuration file (usually it's <command>/etc/nixos/configuration.nix</command>)
and then run <command>sudo nixos-rebuild test -I nixpkgs=<path to your local nixpkgs folder> --fast</command>.</para>
|
||||
</listitem>
|
||||
</itemizedlist>
|
||||
</para>
|
||||
</listitem>
|
||||
</itemizedlist>
|
||||
</para>
|
||||
</listitem>
|
||||
|
||||
<listitem>
|
||||
<para>If you have commits like <command>pkg-name: oh, forgot to insert whitespace</command>, squash them. Use <command>git rebase -i</command>.</para>
|
||||
</listitem>
|
||||
|
||||
<listitem>
|
||||
<para>Rebase your branch against the current <command>master</command>.</para>
|
||||
</listitem>
|
||||
</itemizedlist>
|
||||
</section>
|
||||
|
||||
<section>
|
||||
<title>Submitting changes</title>
|
||||
|
||||
<itemizedlist>
|
||||
<listitem>
|
||||
<para>Push your changes to your fork of nixpkgs.</para>
|
||||
</listitem>
|
||||
|
||||
<listitem>
|
||||
<para>Create pull request:
|
||||
|
||||
<itemizedlist>
|
||||
<listitem>
|
||||
<para>Write the title in the format <command>(pkg-name | service): improvement</command>.
|
||||
|
||||
<itemizedlist>
|
||||
<listitem>
|
||||
<para>If you update a package, write the versions <command>from -> to</command>.</para>
|
||||
</listitem>
|
||||
</itemizedlist>
|
||||
</para>
|
||||
</listitem>
|
||||
|
||||
<listitem>
|
||||
<para>State in a comment whether you have tested your patch. Do not rely much on <command>TravisCI</command>.</para>
|
||||
</listitem>
|
||||
|
||||
<listitem>
|
||||
<para>If you make an improvement, write about your motivation.</para>
|
||||
</listitem>
|
||||
|
||||
<listitem>
|
||||
<para>Notify maintainers of the package. For example, add to the message: <command>cc @jagajaga @domenkozar</command>.</para>
|
||||
</listitem>
|
||||
</itemizedlist>
|
||||
</para>
|
||||
</listitem>
|
||||
</itemizedlist>
|
||||
</section>
|
||||
|
||||
<section>
|
||||
<title>Hotfixing pull requests</title>
|
||||
|
||||
<itemizedlist>
|
||||
<listitem>
|
||||
<para>Make the appropriate changes in your branch.</para>
|
||||
</listitem>
|
||||
|
||||
<listitem>
|
||||
<para>Don't create additional commits, do
|
||||
|
||||
<itemizedlist>
|
||||
<listitem>
|
||||
<para><command>git rebase -i</command></para>
|
||||
</listitem>
|
||||
<listitem>
|
||||
<para>
|
||||
<command>git push --force</command> to your branch.</para>
|
||||
</listitem>
|
||||
</itemizedlist>
|
||||
</para>
|
||||
</listitem>
|
||||
|
||||
</itemizedlist>
|
||||
</section>
|
||||
|
||||
<section>
|
||||
<title>Commit policy</title>
|
||||
|
||||
<itemizedlist>
|
||||
<listitem>
|
||||
<para>Commits must be sufficiently tested before being merged, both for the master and staging branches.</para>
|
||||
</listitem>
|
||||
|
||||
<listitem>
|
||||
<para>Hydra builds for master and staging should not be used as a testing platform; it's a build farm for changes that have already been tested.</para>
|
||||
</listitem>
|
||||
|
||||
<listitem>
|
||||
<para>When changing the bootloader installation process, extra care must be taken. Grub installations cannot be rolled back, hence changes may break people's installations forever. For any non-trivial change to the bootloader please file a PR asking for review, especially from @edolstra.</para>
|
||||
</listitem>
|
||||
</itemizedlist>
|
||||
|
||||
<section>
|
||||
<title>Master branch</title>
|
||||
|
||||
<itemizedlist>
|
||||
<listitem>
|
||||
<para>
|
||||
It should only see non-breaking commits that do not cause mass rebuilds.
|
||||
</para>
|
||||
</listitem>
|
||||
</itemizedlist>
|
||||
</section>
|
||||
|
||||
<section>
|
||||
<title>Staging branch</title>
|
||||
|
||||
<itemizedlist>
|
||||
<listitem>
|
||||
<para>
|
||||
It's only for non-breaking mass-rebuild commits. That means it's not to
|
||||
be used for testing, and changes must have been well tested already.
|
||||
<link xlink:href="http://comments.gmane.org/gmane.linux.distributions.nixos/13447">Read policy here</link>.
|
||||
</para>
|
||||
</listitem>
|
||||
<listitem>
|
||||
<para>
|
||||
If the branch is already in a broken state, please refrain from adding
|
||||
extra new breakages. Stabilize it for a few days, merge into master,
|
||||
then resume development on staging.
|
||||
<link xlink:href="http://hydra.nixos.org/jobset/nixpkgs/staging#tabs-evaluations">Keep an eye on the staging evaluations here</link>.
|
||||
If any fixes for staging happen to be already in master, then master can
|
||||
be merged into staging.
|
||||
</para>
|
||||
</listitem>
|
||||
</itemizedlist>
|
||||
</section>
|
||||
|
||||
<section>
|
||||
<title>Stable release branches</title>
|
||||
|
||||
<itemizedlist>
|
||||
<listitem>
|
||||
<para>
|
||||
If you're cherry-picking a commit to a stable release branch, always use
|
||||
<command>git cherry-pick -xe</command> and ensure the message contains a
|
||||
clear description about why this needs to be included in the stable
|
||||
branch.
|
||||
</para>
|
||||
<para>An example of a cherry-picked commit would look like this:</para>
|
||||
<screen>
|
||||
nixos: Refactor the world.
|
||||
|
||||
The original commit message describing the reason why the world was torn apart.
|
||||
|
||||
(cherry picked from commit abcdef)
|
||||
Reason: I just had a gut feeling that this would also be wanted by people from
|
||||
the stone age.
|
||||
</screen>
|
||||
</listitem>
|
||||
</itemizedlist>
|
||||
</section>
|
||||
|
||||
</section>
|
||||
</chapter>
|
||||
|
||||
205
lib/attrsets.nix
@@ -1,26 +1,21 @@
|
||||
# Operations on attribute sets.
|
||||
|
||||
with {
|
||||
inherit (builtins) head tail length;
|
||||
inherit (builtins) head tail;
|
||||
inherit (import ./trivial.nix) or;
|
||||
inherit (import ./default.nix) fold;
|
||||
inherit (import ./strings.nix) concatStringsSep;
|
||||
inherit (import ./lists.nix) concatMap concatLists all deepSeqList;
|
||||
inherit (import ./misc.nix) maybeAttr;
|
||||
};
|
||||
|
||||
rec {
|
||||
inherit (builtins) attrNames listToAttrs hasAttr isAttrs getAttr;
|
||||
|
||||
|
||||
/* Return an attribute from nested attribute sets.
|
||||
|
||||
Example:
|
||||
x = { a = { b = 3; }; }
|
||||
attrByPath ["a" "b"] 6 x
|
||||
=> 3
|
||||
attrByPath ["z" "z"] 6 x
|
||||
=> 6
|
||||
*/
|
||||
/* Return an attribute from nested attribute sets. For instance
|
||||
["x" "y"] applied to some set e returns e.x.y, if it exists. The
|
||||
default value is returned otherwise. */
|
||||
attrByPath = attrPath: default: e:
|
||||
let attr = head attrPath;
|
||||
in
|
||||
@@ -29,47 +24,15 @@ rec {
|
||||
then attrByPath (tail attrPath) default e.${attr}
|
||||
else default;
|
||||
|
||||
/* Return if an attribute from nested attribute set exists.
|
||||
|
||||
Example:
|
||||
x = { a = { b = 3; }; }
|
||||
hasAttrByPath ["a" "b"] x
|
||||
=> true
|
||||
hasAttrByPath ["z" "z"] x
|
||||
=> false
|
||||
|
||||
*/
|
||||
hasAttrByPath = attrPath: e:
|
||||
let attr = head attrPath;
|
||||
in
|
||||
if attrPath == [] then true
|
||||
else if e ? ${attr}
|
||||
then hasAttrByPath (tail attrPath) e.${attr}
|
||||
else false;
|
||||
|
||||
|
||||
/* Return nested attribute set in which an attribute is set.
|
||||
|
||||
Example:
|
||||
setAttrByPath ["a" "b"] 3
|
||||
=> { a = { b = 3; }; }
|
||||
*/
|
||||
/* Return nested attribute set in which an attribute is set. For instance
|
||||
["x" "y"] applied with some value v returns `x.y = v;' */
|
||||
setAttrByPath = attrPath: value:
|
||||
if attrPath == [] then value
|
||||
else listToAttrs
|
||||
[ { name = head attrPath; value = setAttrByPath (tail attrPath) value; } ];
|
||||
|
||||
|
||||
/* Like `getAttrPath' without a default value. If it doesn't find the
|
||||
path it will throw.
|
||||
|
||||
Example:
|
||||
x = { a = { b = 3; }; }
|
||||
getAttrFromPath ["a" "b"] x
|
||||
=> 3
|
||||
getAttrFromPath ["z" "z"] x
|
||||
=> error: cannot find attribute `z.z'
|
||||
*/
|
||||
getAttrFromPath = attrPath: set:
|
||||
let errorMsg = "cannot find attribute `" + concatStringsSep "." attrPath + "'";
|
||||
in attrByPath attrPath (abort errorMsg) set;
|
||||
@@ -113,39 +76,17 @@ rec {
|
||||
=> { foo = 1; }
|
||||
*/
|
||||
filterAttrs = pred: set:
|
||||
listToAttrs (concatMap (name: let v = set.${name}; in if pred name v then [(nameValuePair name v)] else []) (attrNames set));
|
||||
listToAttrs (fold (n: ys: let v = set.${n}; in if pred n v then [(nameValuePair n v)] ++ ys else ys) [] (attrNames set));
|
||||
|
||||
|
||||
/* Filter an attribute set recursively by removing all attributes for
which the given predicate returns false.
|
||||
|
||||
Example:
|
||||
filterAttrsRecursive (n: v: v != null) { foo = { bar = null; }; }
|
||||
=> { foo = {}; }
|
||||
*/
|
||||
filterAttrsRecursive = pred: set:
|
||||
listToAttrs (
|
||||
concatMap (name:
|
||||
let v = set.${name}; in
|
||||
if pred name v then [
|
||||
(nameValuePair name (
|
||||
if isAttrs v then filterAttrsRecursive pred v
|
||||
else v
|
||||
))
|
||||
] else []
|
||||
) (attrNames set)
|
||||
);
|
||||
|
||||
/* Apply fold functions to values grouped by key.
|
||||
|
||||
Example:
|
||||
foldAttrs (n: a: [n] ++ a) [] [{ a = 2; } { a = 3; }]
|
||||
=> { a = [ 2 3 ]; }
|
||||
/* foldAttrs: apply fold functions to values grouped by key. Eg accumulate values as list:
|
||||
foldAttrs (n: a: [n] ++ a) [] [{ a = 2; } { a = 3; }]
|
||||
=> { a = [ 2 3 ]; }
|
||||
*/
|
||||
foldAttrs = op: nul: list_of_attrs:
|
||||
fold (n: a:
|
||||
fold (name: o:
|
||||
o // (listToAttrs [{inherit name; value = op n.${name} (a.${name} or nul); }])
|
||||
o // (listToAttrs [{inherit name; value = op n.${name} (maybeAttr name nul a); }])
|
||||
) a (attrNames n)
|
||||
) {} list_of_attrs;
|
||||
|
||||
@@ -156,7 +97,7 @@ rec {
|
||||
|
||||
Type:
|
||||
collect ::
|
||||
(AttrSet -> Bool) -> AttrSet -> [x]
|
||||
(AttrSet -> Bool) -> AttrSet -> AttrSet
|
||||
|
||||
Example:
|
||||
collect isList { a = { b = ["b"]; }; c = [1]; }
|
||||
@@ -176,12 +117,7 @@ rec {
|
||||
|
||||
|
||||
/* Utility function that creates a {name, value} pair as expected by
|
||||
builtins.listToAttrs.
|
||||
|
||||
Example:
|
||||
nameValuePair "some" 6
|
||||
=> { name = "some"; value = 6; }
|
||||
*/
|
||||
builtins.listToAttrs. */
|
||||
nameValuePair = name: value: { inherit name value; };
|
||||
|
||||
|
||||
@@ -282,76 +218,36 @@ rec {
|
||||
listToAttrs (map (n: nameValuePair n (f n)) names);
|
||||
|
||||
|
||||
/* Check whether the argument is a derivation. Any set with
|
||||
{ type = "derivation"; } counts as a derivation.
|
||||
|
||||
Example:
|
||||
nixpkgs = import <nixpkgs> {}
|
||||
isDerivation nixpkgs.ruby
|
||||
=> true
|
||||
isDerivation "foobar"
|
||||
=> false
|
||||
*/
|
||||
/* Check whether the argument is a derivation. */
|
||||
isDerivation = x: isAttrs x && x ? type && x.type == "derivation";
|
||||
|
||||
/* Converts a store path to a fake derivation. */
|
||||
toDerivation = path:
|
||||
let
|
||||
path' = builtins.storePath path;
|
||||
res =
|
||||
{ type = "derivation";
|
||||
name = builtins.unsafeDiscardStringContext (builtins.substring 33 (-1) (baseNameOf path'));
|
||||
outPath = path';
|
||||
outputs = [ "out" ];
|
||||
out = res;
|
||||
outputName = "out";
|
||||
};
|
||||
in res;
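  # Rough usage sketch (the store path below is hypothetical):
  #   toDerivation /nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-hello-2.10
  # yields a set with type = "derivation", name = "hello-2.10" and an outPath
  # pointing at that store path, so it can be used where a derivation is expected.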
|
||||
|
||||
|
||||
/* If `cond' is true, return the attribute set `as',
|
||||
otherwise an empty attribute set.
|
||||
|
||||
Example:
|
||||
optionalAttrs (true) { my = "set"; }
|
||||
=> { my = "set"; }
|
||||
optionalAttrs (false) { my = "set"; }
|
||||
=> { }
|
||||
*/
|
||||
/* If the Boolean `cond' is true, return the attribute set `as',
|
||||
otherwise an empty attribute set. */
|
||||
optionalAttrs = cond: as: if cond then as else {};
|
||||
|
||||
|
||||
/* Merge sets of attributes and use the function f to merge attributes
|
||||
values.
|
||||
|
||||
Example:
|
||||
zipAttrsWithNames ["a"] (name: vs: vs) [{a = "x";} {a = "y"; b = "z";}]
|
||||
=> { a = ["x" "y"]; }
|
||||
*/
|
||||
values. */
|
||||
zipAttrsWithNames = names: f: sets:
|
||||
listToAttrs (map (name: {
|
||||
inherit name;
|
||||
value = f name (catAttrs name sets);
|
||||
}) names);
|
||||
|
||||
/* Implementation note: Common names appear multiple times in the list of
names, hopefully this does not affect the system because the maximal
laziness avoids computing the same expression twice and listToAttrs does
not care about duplicated attribute names.
|
||||
|
||||
Example:
|
||||
zipAttrsWith (name: values: values) [{a = "x";} {a = "y"; b = "z";}]
|
||||
=> { a = ["x" "y"]; b = ["z"] }
|
||||
*/
|
||||
# implementation note: Common names appear multiple times in the list of
# names, hopefully this does not affect the system because the maximal
# laziness avoids computing the same expression twice and listToAttrs does
# not care about duplicated attribute names.
|
||||
zipAttrsWith = f: sets: zipAttrsWithNames (concatMap attrNames sets) f sets;
|
||||
/* Like `zipAttrsWith' with `(name: values: value)' as the function.
|
||||
|
||||
Example:
|
||||
zipAttrs [{a = "x";} {a = "y"; b = "z";}]
|
||||
=> { a = ["x" "y"]; b = ["z"] }
|
||||
*/
|
||||
zipAttrs = zipAttrsWith (name: values: values);
|
||||
|
||||
/* backward compatibility */
|
||||
zipWithNames = zipAttrsWithNames;
|
||||
zip = builtins.trace "lib.zip is deprecated, use lib.zipAttrsWith instead" zipAttrsWith;
|
||||
|
||||
|
||||
/* Does the same as the update operator '//' except that attributes are
|
||||
merged until the given predicate is verified. The predicate should
|
||||
accept 3 arguments which are the path to reach the attribute, a part of
|
||||
@@ -391,7 +287,7 @@ rec {
|
||||
);
|
||||
in f [] [rhs lhs];
|
||||
|
||||
/* A recursive variant of the update operator ‘//’. The recursion
|
||||
/* A recursive variant of the update operator ‘//’. The recursion
|
||||
stops when one of the attribute values is not an attribute set,
|
||||
in which case the right hand side value takes precedence over the
|
||||
left hand side value.
|
||||
@@ -415,15 +311,6 @@ rec {
|
||||
!(isAttrs lhs && isAttrs rhs)
|
||||
) lhs rhs;
|
||||
|
||||
/* Returns true if the pattern is contained in the set. False otherwise.
|
||||
|
||||
FIXME(zimbatm): this example doesn't work !!!
|
||||
|
||||
Example:
|
||||
sys = mkSystem { }
|
||||
matchAttrs { cpu = { bits = 64; }; } sys
|
||||
=> true
|
||||
*/
|
||||
matchAttrs = pattern: attrs:
|
||||
fold or false (attrValues (zipAttrsWithNames (attrNames pattern) (n: values:
|
||||
let pat = head values; val = head (tail values); in
|
||||
@@ -432,40 +319,10 @@ rec {
|
||||
else pat == val
|
||||
) [pattern attrs]));
|
||||
|
||||
/* Override only the attributes that are already present in the old set;
useful for deep-overriding.
|
||||
|
||||
Example:
|
||||
x = { a = { b = 4; c = 3; }; }
|
||||
overrideExisting x { a = { b = 6; d = 2; }; }
|
||||
=> { a = { b = 6; d = 2; }; }
|
||||
*/
|
||||
# override only the attributes that are already present in the old set
|
||||
# useful for deep-overriding
|
||||
overrideExisting = old: new:
|
||||
old // listToAttrs (map (attr: nameValuePair attr (attrByPath [attr] old.${attr} new)) (attrNames old));
|
||||
|
||||
/* Get a package output.
|
||||
If no output is found, fallback to `.out` and then to the default.
|
||||
|
||||
Example:
|
||||
getOutput "dev" pkgs.openssl
|
||||
=> "/nix/store/9rz8gxhzf8sw4kf2j2f1grr49w8zx5vj-openssl-1.0.1r-dev"
|
||||
*/
|
||||
getOutput = output: pkg:
|
||||
if pkg.outputUnspecified or false
|
||||
then pkg.${output} or pkg.out or pkg
|
||||
else pkg;
|
||||
|
||||
getBin = getOutput "bin";
|
||||
getLib = getOutput "lib";
|
||||
getDev = getOutput "dev";
|
||||
|
||||
/* Pick the outputs of packages to place in buildInputs */
|
||||
chooseDevOutputs = drvs: builtins.map getDev drvs;
|
||||
|
||||
/*** deprecated stuff ***/
|
||||
|
||||
zipWithNames = zipAttrsWithNames;
|
||||
zip = builtins.trace
|
||||
"lib.zip is deprecated, use lib.zipAttrsWith instead" zipAttrsWith;
|
||||
|
||||
deepSeqAttrs = x: y: deepSeqList (attrValues x) y;
|
||||
}
|
||||
|
||||
@@ -50,7 +50,7 @@ let inherit (lib) nv nvs; in
|
||||
# nice features:
|
||||
# declaring "optional features" is modular. For instance:
|
||||
# flags.curl = {
|
||||
# configureFlags = ["--with-curl=${curl.dev}" "--with-curlwrappers"];
|
||||
# configureFlags = ["--with-curl=${curl}" "--with-curlwrappers"];
|
||||
# buildInputs = [curl openssl];
|
||||
# };
|
||||
# flags.other = { .. }
|
||||
|
||||
@@ -1,8 +1,6 @@
|
||||
let
|
||||
|
||||
lib = import ./default.nix;
|
||||
inherit (builtins) attrNames isFunction;
|
||||
|
||||
in
|
||||
|
||||
rec {
|
||||
@@ -31,8 +29,8 @@ rec {
|
||||
|
||||
For another application, see build-support/vm, where this
|
||||
function is used to build arbitrary derivations inside a QEMU
|
||||
virtual machine.
|
||||
*/
|
||||
virtual machine. */
|
||||
|
||||
overrideDerivation = drv: f:
|
||||
let
|
||||
newDrv = derivation (drv.drvAttrs // (f drv));
|
||||
@@ -51,25 +49,29 @@ rec {
|
||||
else { }));
|
||||
|
||||
|
||||
# usage: (you can use override multiple times)
|
||||
# let d = makeOverridable stdenv.mkDerivation { name = ..; buildInputs; }
|
||||
# noBuildInputs = d.override { buildInputs = []; }
|
||||
# additionalBuildInputs = d.override ( args : args // { buildInputs = args.buildInputs ++ [ additional ]; } )
|
||||
makeOverridable = f: origArgs:
|
||||
let
|
||||
ff = f origArgs;
|
||||
overrideWith = newArgs: origArgs // (if builtins.isFunction newArgs then newArgs origArgs else newArgs);
|
||||
in
|
||||
if builtins.isAttrs ff then (ff // {
|
||||
override = newArgs: makeOverridable f (overrideWith newArgs);
|
||||
overrideDerivation = fdrv:
|
||||
makeOverridable (args: overrideDerivation (f args) fdrv) origArgs;
|
||||
${if ff ? overrideAttrs then "overrideAttrs" else null} = fdrv:
|
||||
makeOverridable (args: (f args).overrideAttrs fdrv) origArgs;
|
||||
})
|
||||
else if builtins.isFunction ff then {
|
||||
override = newArgs: makeOverridable f (overrideWith newArgs);
|
||||
__functor = self: ff;
|
||||
overrideDerivation = throw "overrideDerivation not yet supported for functors";
|
||||
}
|
||||
if builtins.isAttrs ff then (ff //
|
||||
{ override = newArgs:
|
||||
makeOverridable f (origArgs // (if builtins.isFunction newArgs then newArgs origArgs else newArgs));
|
||||
deepOverride = newArgs:
|
||||
makeOverridable f (lib.overrideExisting (lib.mapAttrs (deepOverrider newArgs) origArgs) newArgs);
|
||||
overrideDerivation = fdrv:
|
||||
makeOverridable (args: overrideDerivation (f args) fdrv) origArgs;
|
||||
})
|
||||
else ff;
|
||||
|
||||
deepOverrider = newArgs: name: x: if builtins.isAttrs x then (
|
||||
if x ? deepOverride then (x.deepOverride newArgs) else
|
||||
if x ? override then (x.override newArgs) else
|
||||
x) else x;
|
||||
|
||||
|
||||
/* Call the package function in the file `fn' with the required
|
||||
arguments automatically. The function is called with the
|
||||
@@ -93,29 +95,10 @@ rec {
|
||||
};
|
||||
*/
|
||||
callPackageWith = autoArgs: fn: args:
|
||||
let
|
||||
f = if builtins.isFunction fn then fn else import fn;
|
||||
auto = builtins.intersectAttrs (builtins.functionArgs f) autoArgs;
|
||||
in makeOverridable f (auto // args);
|
||||
let f = if builtins.isFunction fn then fn else import fn; in
|
||||
makeOverridable f ((builtins.intersectAttrs (builtins.functionArgs f) autoArgs) // args);
|
||||
|
||||
|
||||
/* Like callPackage, but for a function that returns an attribute
|
||||
set of derivations. The override function is added to the
|
||||
individual attributes. */
|
||||
callPackagesWith = autoArgs: fn: args:
|
||||
let
|
||||
f = if builtins.isFunction fn then fn else import fn;
|
||||
auto = builtins.intersectAttrs (builtins.functionArgs f) autoArgs;
|
||||
finalArgs = auto // args;
|
||||
pkgs = f finalArgs;
|
||||
mkAttrOverridable = name: pkg: pkg // {
|
||||
override = newArgs: mkAttrOverridable name (f (finalArgs // newArgs)).${name};
|
||||
};
|
||||
in lib.mapAttrs mkAttrOverridable pkgs;
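  # Rough usage sketch (file name and attribute names are hypothetical):
  #   callPackagesWith pkgs ./xorg-packages.nix { }
  # calls the function in ./xorg-packages.nix with matching arguments from pkgs and
  # returns its attribute set of derivations; each attribute gets an `override` that
  # re-evaluates the whole set with changed arguments and picks out that attribute.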
|
||||
|
||||
|
||||
/* Add attributes to each output of a derivation without changing
|
||||
the derivation itself. */
|
||||
/* Add attributes to each output of a derivation without changing the derivation itself */
|
||||
addPassthru = drv: passthru:
|
||||
let
|
||||
outputs = drv.outputs or [ "out" ];
|
||||
@@ -131,58 +114,5 @@ rec {
|
||||
};
|
||||
|
||||
outputsList = map outputToAttrListElement outputs;
|
||||
in commonAttrs // { outputUnspecified = true; };
|
||||
|
||||
|
||||
/* Strip a derivation of all non-essential attributes, returning
|
||||
only those needed by hydra-eval-jobs. Also strictly evaluate the
|
||||
result to ensure that there are no thunks kept alive to prevent
|
||||
garbage collection. */
|
||||
hydraJob = drv:
|
||||
let
|
||||
outputs = drv.outputs or ["out"];
|
||||
|
||||
commonAttrs =
|
||||
{ inherit (drv) name system meta; inherit outputs; }
|
||||
// lib.optionalAttrs (drv._hydraAggregate or false) {
|
||||
_hydraAggregate = true;
|
||||
constituents = map hydraJob (lib.flatten drv.constituents);
|
||||
}
|
||||
// (lib.listToAttrs outputsList);
|
||||
|
||||
makeOutput = outputName:
|
||||
let output = drv.${outputName}; in
|
||||
{ name = outputName;
|
||||
value = commonAttrs // {
|
||||
outPath = output.outPath;
|
||||
drvPath = output.drvPath;
|
||||
type = "derivation";
|
||||
inherit outputName;
|
||||
};
|
||||
};
|
||||
|
||||
outputsList = map makeOutput outputs;
|
||||
|
||||
drv' = (lib.head outputsList).value;
|
||||
in lib.deepSeq drv' drv';
|
||||
|
||||
/* Make a set of packages with a common scope. All packages called
|
||||
with the provided `callPackage' will be evaluated with the same
|
||||
arguments. Any package in the set may depend on any other. The
|
||||
`override' function allows subsequent modification of the package
|
||||
set in a consistent way, i.e. all packages in the set will be
|
||||
called with the overridden packages. The package sets may be
|
||||
hierarchical: the packages in the set are called with the scope
|
||||
provided by `newScope' and the set provides a `newScope' attribute
|
||||
which can form the parent scope for later package sets. */
|
||||
makeScope = newScope: f:
|
||||
let self = f self // {
|
||||
newScope = scope: newScope (self // scope);
|
||||
callPackage = self.newScope {};
|
||||
override = g: makeScope newScope (self_:
|
||||
let super = f self_;
|
||||
in super // g super self_);
|
||||
};
|
||||
in self;
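  # Rough usage sketch (names are hypothetical):
  #   myScope = makeScope pkgs.newScope (self: {
  #     libfoo = self.callPackage ./libfoo.nix { };
  #     barApp = self.callPackage ./bar-app.nix { };  # may use libfoo from the scope
  #   });
  # myScope.override can then swap out libfoo for every package in the scope at once.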
|
||||
|
||||
in commonAttrs.${drv.outputName};
|
||||
}
|
||||
|
||||
@@ -13,15 +13,10 @@ rec {
|
||||
|
||||
addErrorContextToAttrs = lib.mapAttrs (a: v: lib.addErrorContext "while evaluating ${a}" v);
|
||||
|
||||
traceIf = p: msg: x: if p then trace msg x else x;
|
||||
|
||||
traceVal = x: trace x x;
|
||||
traceXMLVal = x: trace (builtins.toXML x) x;
|
||||
traceXMLValMarked = str: x: trace (str + builtins.toXML x) x;
|
||||
|
||||
# strict trace functions (traced structure is fully evaluated and printed)
|
||||
traceSeq = x: y: trace (builtins.deepSeq x x) y;
|
||||
traceValSeq = v: traceVal (builtins.deepSeq v v);
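  # For example (a sketch): `traceSeq { a.b = 1; } "done"` forces and prints the
  # whole attribute set on stderr and then returns "done"; plain `trace` would not
  # force the nested values first.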
|
||||
traceVal = x: builtins.trace x x;
|
||||
traceXMLVal = x: builtins.trace (builtins.toXML x) x;
|
||||
traceXMLValMarked = str: x: builtins.trace (str + builtins.toXML x) x;
|
||||
|
||||
# this can help debug your code as well - designed to not produce thousands of lines
|
||||
traceShowVal = x : trace (showVal x) x;
|
||||
@@ -47,7 +42,6 @@ rec {
|
||||
traceCall2 = n : f : a : b : let t = n2 : x : traceShowValMarked "${n} ${n2}:" x; in t "result" (f (t "arg 1" a) (t "arg 2" b));
|
||||
traceCall3 = n : f : a : b : c : let t = n2 : x : traceShowValMarked "${n} ${n2}:" x; in t "result" (f (t "arg 1" a) (t "arg 2" b) (t "arg 3" c));
|
||||
|
||||
# FIXME: rename this?
|
||||
traceValIfNot = c: x:
|
||||
if c x then true else trace (showVal x) false;
|
||||
|
||||
@@ -73,9 +67,27 @@ rec {
|
||||
# usage: { testX = allTrue [ true ]; }
|
||||
testAllTrue = expr : { inherit expr; expected = map (x: true) expr; };
|
||||
|
||||
strict = v:
|
||||
trace "Warning: strict is deprecated and will be removed in the next release"
|
||||
(builtins.seq v v);
|
||||
# evaluate everything once so that errors will occur earlier
|
||||
# hacky: traverse attrs by adding a dummy
|
||||
# ignores functions (should this behavior change?) See strictf
|
||||
#
|
||||
# Note: This should be a primop! Something like seq of haskell would be nice to
|
||||
# have as well. It's used for debugging only anyway
|
||||
strict = x :
|
||||
let
|
||||
traverse = x :
|
||||
if isString x then true
|
||||
else if isAttrs x then
|
||||
if x ? outPath then true
|
||||
else all id (mapAttrsFlatten (n: traverse) x)
|
||||
else if isList x then
|
||||
all id (map traverse x)
|
||||
else if isBool x then true
|
||||
else if isFunction x then true
|
||||
else if isInt x then true
|
||||
else if x == null then true
|
||||
else true; # a (store) path?
|
||||
in if traverse x then x else throw "else never reached";
|
||||
|
||||
# example: (traceCallXml "myfun" id 3) will output something like
|
||||
# calling myfun arg 1: 3 result: 3
|
||||
@@ -94,6 +106,6 @@ rec {
|
||||
)
|
||||
else
|
||||
let r = strict expr;
|
||||
in trace "${str}\n result:\n${builtins.toXML r}" r
|
||||
in builtins.trace "${str}\n result:\n${builtins.toXML r}" r
|
||||
);
|
||||
}
|
||||
|
||||
@@ -1,47 +1,26 @@
|
||||
let
|
||||
let
|
||||
|
||||
# trivial, often used functions
|
||||
trivial = import ./trivial.nix;
|
||||
|
||||
# datatypes
|
||||
attrsets = import ./attrsets.nix;
|
||||
lists = import ./lists.nix;
|
||||
strings = import ./strings.nix;
|
||||
stringsWithDeps = import ./strings-with-deps.nix;
|
||||
|
||||
# packaging
|
||||
customisation = import ./customisation.nix;
|
||||
maintainers = import ./maintainers.nix;
|
||||
meta = import ./meta.nix;
|
||||
attrsets = import ./attrsets.nix;
|
||||
sources = import ./sources.nix;
|
||||
|
||||
# module system
|
||||
modules = import ./modules.nix;
|
||||
options = import ./options.nix;
|
||||
types = import ./types.nix;
|
||||
|
||||
# constants
|
||||
licenses = import ./licenses.nix;
|
||||
meta = import ./meta.nix;
|
||||
debug = import ./debug.nix;
|
||||
misc = import ./misc.nix;
|
||||
maintainers = import ./maintainers.nix;
|
||||
platforms = import ./platforms.nix;
|
||||
systems = import ./systems.nix;
|
||||
|
||||
# misc
|
||||
debug = import ./debug.nix;
|
||||
generators = import ./generators.nix;
|
||||
misc = import ./deprecated.nix;
|
||||
|
||||
# domain-specific
|
||||
sandbox = import ./sandbox.nix;
|
||||
fetchers = import ./fetchers.nix;
|
||||
customisation = import ./customisation.nix;
|
||||
licenses = import ./licenses.nix;
|
||||
|
||||
in
|
||||
{ inherit trivial
|
||||
attrsets lists strings stringsWithDeps
|
||||
customisation maintainers meta sources
|
||||
modules options types
|
||||
licenses platforms systems
|
||||
debug generators misc
|
||||
sandbox fetchers;
|
||||
{ inherit trivial lists strings stringsWithDeps attrsets sources options
|
||||
modules types meta debug maintainers licenses platforms systems;
|
||||
}
|
||||
# !!! don't include everything at top-level; perhaps only the most
|
||||
# commonly used functions.
|
||||
|
||||
@@ -1,423 +0,0 @@
|
||||
let lib = import ./default.nix;
|
||||
inherit (builtins) isFunction head tail isList isAttrs isInt attrNames;
|
||||
|
||||
in
|
||||
|
||||
with import ./lists.nix;
|
||||
with import ./attrsets.nix;
|
||||
with import ./strings.nix;
|
||||
|
||||
rec {
|
||||
|
||||
# returns default if env var is not set
|
||||
maybeEnv = name: default:
|
||||
let value = builtins.getEnv name; in
|
||||
if value == "" then default else value;
|
||||
|
||||
defaultMergeArg = x : y: if builtins.isAttrs y then
|
||||
y
|
||||
else
|
||||
(y x);
|
||||
defaultMerge = x: y: x // (defaultMergeArg x y);
|
||||
foldArgs = merger: f: init: x:
|
||||
let arg=(merger init (defaultMergeArg init x));
|
||||
# now add the function with composed args already applied to the final attrs
|
||||
base = (setAttrMerge "passthru" {} (f arg)
|
||||
( z : z // rec {
|
||||
function = foldArgs merger f arg;
|
||||
args = (lib.attrByPath ["passthru" "args"] {} z) // x;
|
||||
} ));
|
||||
withStdOverrides = base // {
|
||||
override = base.passthru.function;
|
||||
} ;
|
||||
in
|
||||
withStdOverrides;
|
||||
|
||||
|
||||
# predecessors: proposed replacement for applyAndFun (which has a bug because it merges twice)
|
||||
# the naming "overridableDelayableArgs" tries to express that you can
|
||||
# - override attr values which have been supplied earlier
|
||||
# - use attr values before they have been supplied by accessing the fix point
|
||||
# name "fixed"
|
||||
# f: the (delayed overridden) arguments are applied to this
|
||||
#
|
||||
# initial: initial attrs arguments and settings. see defaultOverridableDelayableArgs
|
||||
#
|
||||
# returns: f applied to the arguments // special attributes attrs
|
||||
# a) merge: merge applied args with new args. Whether an argument is overridden depends on the merge settings
|
||||
# b) replace: this lets you replace and remove names no matter which merge function has been set
|
||||
#
|
||||
# examples: see test cases "res" below;
|
||||
overridableDelayableArgs =
|
||||
f : # the function applied to the arguments
|
||||
initial : # you pass attrs, the functions below are passing a function taking the fix argument
|
||||
let
|
||||
takeFixed = if isFunction initial then initial else (fixed : initial); # transform initial to an expression always taking the fixed argument
|
||||
tidy = args :
|
||||
let # apply all functions given in "applyPreTidy" in sequence
|
||||
applyPreTidyFun = fold ( n : a : x : n ( a x ) ) lib.id (maybeAttr "applyPreTidy" [] args);
|
||||
in removeAttrs (applyPreTidyFun args) ( ["applyPreTidy"] ++ (maybeAttr "removeAttrs" [] args) ); # tidy up args before applying them
|
||||
fun = n : x :
|
||||
let newArgs = fixed :
|
||||
let args = takeFixed fixed;
|
||||
mergeFun = args.${n};
|
||||
in if isAttrs x then (mergeFun args x)
|
||||
else assert isFunction x;
|
||||
mergeFun args (x ( args // { inherit fixed; }));
|
||||
in overridableDelayableArgs f newArgs;
|
||||
in
|
||||
(f (tidy (lib.fix takeFixed))) // {
|
||||
merge = fun "mergeFun";
|
||||
replace = fun "keepFun";
|
||||
};
|
||||
defaultOverridableDelayableArgs = f :
|
||||
let defaults = {
|
||||
mergeFun = mergeAttrByFunc; # default merge function. merge strategy (concatenate lists, strings) is given by mergeAttrBy
|
||||
keepFun = a : b : { inherit (a) removeAttrs mergeFun keepFun mergeAttrBy; } // b; # even when using replace preserve these values
|
||||
applyPreTidy = []; # list of functions applied to args before args are tidied up (usage case : prepareDerivationArgs)
|
||||
mergeAttrBy = mergeAttrBy // {
|
||||
applyPreTidy = a : b : a ++ b;
|
||||
removeAttrs = a : b: a ++ b;
|
||||
};
|
||||
removeAttrs = ["mergeFun" "keepFun" "mergeAttrBy" "removeAttrs" "fixed" ]; # before applying the arguments to the function make sure these names are gone
|
||||
};
|
||||
in (overridableDelayableArgs f defaults).merge;
|
||||
|
||||
|
||||
|
||||
# rec { # an example of how composedArgsAndFun can be used
|
||||
# a = composedArgsAndFun (x : x) { a = ["2"]; meta = { d = "bar";}; };
|
||||
# # meta.d will be lost ! It's your task to preserve it (eg using a merge function)
|
||||
# b = a.passthru.function { a = [ "3" ]; meta = { d2 = "bar2";}; };
|
||||
# # instead of passing/ overriding values you can use a merge function:
|
||||
# c = b.passthru.function ( x: { a = x.a ++ ["4"]; }); # consider using (maybeAttr "a" [] x)
|
||||
# }
|
||||
# result:
|
||||
# {
|
||||
# a = { a = ["2"]; meta = { d = "bar"; }; passthru = { function = .. }; };
|
||||
# b = { a = ["3"]; meta = { d2 = "bar2"; }; passthru = { function = .. }; };
|
||||
# c = { a = ["3" "4"]; meta = { d2 = "bar2"; }; passthru = { function = .. }; };
|
||||
# # c2 is equal to c
|
||||
# }
|
||||
composedArgsAndFun = f: foldArgs defaultMerge f {};
|
||||
|
||||
|
||||
# shortcut for attrByPath ["name"] default attrs
|
||||
maybeAttrNullable = maybeAttr;
|
||||
|
||||
# shortcut for attrByPath ["name"] default attrs
|
||||
maybeAttr = name: default: attrs: attrs.${name} or default;
|
||||
|
||||
|
||||
# Return the second argument if the first one is true or the empty version
|
||||
# of the second argument.
|
||||
ifEnable = cond: val:
|
||||
if cond then val
|
||||
else if builtins.isList val then []
|
||||
else if builtins.isAttrs val then {}
|
||||
# else if builtins.isString val then ""
|
||||
else if val == true || val == false then false
|
||||
else null;
|
||||
|
||||
|
||||
# Return true only if there is an attribute and it is true.
|
||||
checkFlag = attrSet: name:
|
||||
if name == "true" then true else
|
||||
if name == "false" then false else
|
||||
if (elem name (attrByPath ["flags"] [] attrSet)) then true else
|
||||
attrByPath [name] false attrSet ;
|
||||
|
||||
|
||||
# Input : attrSet, [ [name default] ... ], name
|
||||
# Output : its value or default.
|
||||
getValue = attrSet: argList: name:
|
||||
( attrByPath [name] (if checkFlag attrSet name then true else
|
||||
if argList == [] then null else
|
||||
let x = builtins.head argList; in
|
||||
if (head x) == name then
|
||||
(head (tail x))
|
||||
else (getValue attrSet
|
||||
(tail argList) name)) attrSet );
|
||||
|
||||
|
||||
# Input : attrSet, [[name default] ...], [ [flagname reqs..] ... ]
|
||||
# Output : are reqs satisfied? It's asserted.
|
||||
checkReqs = attrSet : argList : condList :
|
||||
(
|
||||
fold lib.and true
|
||||
(map (x: let name = (head x) ; in
|
||||
|
||||
((checkFlag attrSet name) ->
|
||||
(fold lib.and true
|
||||
(map (y: let val=(getValue attrSet argList y); in
|
||||
(val!=null) && (val!=false))
|
||||
(tail x))))) condList)) ;
|
||||
|
||||
|
||||
  # This function has O(n^2) performance.
  uniqList = {inputList, acc ? []} :
    let go = xs : acc :
          if xs == []
          then []
          else let x = head xs;
                   y = if elem x acc then [] else [x];
               in y ++ go (tail xs) (y ++ acc);
    in go inputList acc;

  uniqListExt = {inputList, outputList ? [],
                 getter ? (x : x), compare ? (x: y: x==y)}:
    if inputList == [] then outputList else
    let x = head inputList;
        isX = y: (compare (getter y) (getter x));
        newOutputList = outputList ++
          (if any isX outputList then [] else [x]);
    in uniqListExt { outputList = newOutputList;
                     inputList = (tail inputList);
                     inherit getter compare;
                   };

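  # Hypothetical inputs, to illustrate both helpers:
  #   uniqList { inputList = [ 1 2 2 3 1 ]; }                        -> [ 1 2 3 ]
  #   uniqListExt { inputList = [ { n = 1; } { n = 1; } { n = 2; } ];
  #                 getter = x: x.n; }                               -> [ { n = 1; } { n = 2; } ]
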
  condConcat = name: list: checker:
    if list == [] then name else
    if checker (head list) then
      condConcat
        (name + (head (tail list)))
        (tail (tail list))
        checker
    else condConcat
        name (tail (tail list)) checker;

  lazyGenericClosure = {startSet, operator}:
    let
      work = list: doneKeys: result:
        if list == [] then
          result
        else
          let x = head list; key = x.key; in
          if elem key doneKeys then
            work (tail list) doneKeys result
          else
            work (tail list ++ operator x) ([key] ++ doneKeys) ([x] ++ result);
    in
      work startSet [] [];

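  # A hypothetical run (keys made up):
  #   lazyGenericClosure {
  #     startSet = [ { key = 1; } ];
  #     operator = item: if item.key < 3 then [ { key = item.key + 1; } ] else [];
  #   }
  #   -> [ { key = 3; } { key = 2; } { key = 1; } ]   # newest elements first
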
  innerModifySumArgs = f: x: a: b: if b == null then (f a b) // x else
    innerModifySumArgs f x (a // b);
  modifySumArgs = f: x: innerModifySumArgs f x {};

  innerClosePropagation = acc : xs :
    if xs == []
    then acc
    else let y  = head xs;
             ys = tail xs;
         in if ! isAttrs y
            then innerClosePropagation acc ys
            else let acc' = [y] ++ acc;
                 in innerClosePropagation
                      acc'
                      (uniqList { inputList = (maybeAttrNullable "propagatedBuildInputs" [] y)
                                           ++ (maybeAttrNullable "propagatedNativeBuildInputs" [] y)
                                           ++ ys;
                                  acc = acc';
                                }
                      );

  closePropagation = list: (uniqList {inputList = (innerClosePropagation [] list);});

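  # Usage sketch (pkgA is a hypothetical derivation): closePropagation [ pkgA ]
  # yields pkgA together with everything reachable through its
  # propagatedBuildInputs / propagatedNativeBuildInputs, without duplicates.
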
  # calls a function (f attr value) for each record item. returns a list
  mapAttrsFlatten = f : r : map (attr: f attr r.${attr}) (attrNames r);

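  # e.g. (illustrative):
  #   mapAttrsFlatten (name: value: "${name}=${toString value}") { a = 1; b = 2; }
  #   -> [ "a=1" "b=2" ]
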
  # attribute set containing one attribute
  nvs = name : value : listToAttrs [ (nameValuePair name value) ];
  # adds / replaces an attribute of an attribute set
  setAttr = set : name : v : set // (nvs name v);

  # setAttrMerge (similar to mergeAttrsWithFunc but only merges the values of a particular name)
  # setAttrMerge "a" [] { a = [2];} (x : x ++ [3]) -> { a = [2 3]; }
  # setAttrMerge "a" [] { } (x : x ++ [3]) -> { a = [ 3]; }
  setAttrMerge = name : default : attrs : f :
    setAttr attrs name (f (maybeAttr name default attrs));

  # Using f = a: b: b the result is similar to //
  # merge attributes with custom function handling the case that the attribute
  # exists in both sets
  mergeAttrsWithFunc = f : set1 : set2 :
    fold (n: set : if set ? ${n}
                   then setAttr set n (f set.${n} set2.${n})
                   else set )
         (set2 // set1) (attrNames set2);

  # merge two attribute sets, concatenating the values of attributes with the same name
  # eg { a = 7; } { a = [ 2 3 ]; } becomes { a = [ 7 2 3 ]; }
  mergeAttrsConcatenateValues = mergeAttrsWithFunc ( a : b : (toList a) ++ (toList b) );

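  # e.g. (illustrative):
  #   mergeAttrsWithFunc (a: b: a + b) { a = 1; } { a = 10; }  -> { a = 11; }
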
  # merges attribute sets using //; if a name exists in both sets
  # an error is thrown unless it is listed in mergeLists,
  # so you can use mergeAttrsNoOverride {} { buildInputs = [a]; } { buildInputs = [b]; } to get
  # { buildInputs = [b a]; }
  # merging buildPhase doesn't really make sense: the cases where appending / prefixing
  # fits your needs are rare, so the first buildPhase will override the second one
  # ! deprecated, use mergeAttrByFunc instead
  mergeAttrsNoOverride = { mergeLists ? ["buildInputs" "propagatedBuildInputs"],
                           overrideSnd ? [ "buildPhase" ]
                         } : attrs1 : attrs2 :
    fold (n: set :
        setAttr set n ( if set ? ${n}
            then # merge
              if elem n mergeLists # attribute contains list, merge them by concatenating
              then attrs2.${n} ++ attrs1.${n}
              else if elem n overrideSnd
              then attrs1.${n}
              else throw "error mergeAttrsNoOverride, attribute ${n} given in both attributes - no merge func defined"
            else attrs2.${n} # add attribute not existing in attr1
        )) attrs1 (attrNames attrs2);

  # example usage:
  #   mergeAttrByFunc {
  #     inherit mergeAttrBy; # defined below
  #     buildInputs = [ a b ];
  #   } {
  #     buildInputs = [ c d ];
  #   };
  # will result in
  #   { mergeAttrBy = [...]; buildInputs = [ a b c d ]; }
  # It is used by prepareDerivationArgs, defaultOverridableDelayableArgs and can be used when composing using
  # foldArgs, composedArgsAndFun or applyAndFun. Example: composableDerivation in all-packages.nix
  mergeAttrByFunc = x : y :
    let
      mergeAttrBy2 = { mergeAttrBy = lib.mergeAttrs; }
                     // (maybeAttr "mergeAttrBy" {} x)
                     // (maybeAttr "mergeAttrBy" {} y); in
    fold lib.mergeAttrs {} [
      x y
      (mapAttrs ( a : v : # merge special names using given functions
          if x ? ${a}
             then if y ? ${a}
               then v x.${a} y.${a} # both have attr, use merge func
               else x.${a} # only x has attr
             else y.${a} # only y has attr)
        ) (removeAttrs mergeAttrBy2
            # don't merge attrs which are neither in x nor y
            (filter (a: ! x ? ${a} && ! y ? ${a})
              (attrNames mergeAttrBy2))
          )
      )
    ];
  mergeAttrsByFuncDefaults = foldl mergeAttrByFunc { inherit mergeAttrBy; };
  mergeAttrsByFuncDefaultsClean = list: removeAttrs (mergeAttrsByFuncDefaults list) ["mergeAttrBy"];

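  # A rough sketch of the defaults in action (a..d are hypothetical packages):
  #   mergeAttrsByFuncDefaultsClean [
  #     { buildInputs = [ a b ]; preConfigure = "echo one"; }
  #     { buildInputs = [ c d ]; preConfigure = "echo two"; }
  #   ]
  #   -> { buildInputs = [ a b c d ]; preConfigure = "echo one\necho two"; }
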
  # merge attrs based on version key into mkDerivation args, see mergeAttrBy to learn about smart merge defaults
  #
  # This function is best explained by an example:
  #
  # {version ? "2.x"} :
  #
  # mkDerivation (mergeAttrsByVersion "package-name" version
  #   { # version specific settings
  #     "git" = { src = ..; preConfigure = "autogen.sh"; buildInputs = [automake autoconf libtool]; };
  #     "2.x" = { src = ..; };
  #   }
  #   { # shared settings
  #     buildInputs = [ common build inputs ];
  #     meta = { .. }
  #   }
  # )
  #
  # Please note that e.g. Eelco Dolstra usually prefers having one file for
  # each version. On the other hand there are valuable additional design goals:
  #  - readability
  #  - do it once only
  #  - try to avoid duplication
  #
  # Marc Weber and Michael Raskin sometimes prefer keeping older
  # versions around for testing and regression tests - as long as it's cheap to
  # do so.
  #
  # Very often it just happens that the "shared" code is the bigger part.
  # Then using this function might be appropriate.
  #
  # Be aware that it's easy to cause recompilations in all versions when using
  # this function - also, if derivations get too complex, splitting into multiple
  # files is the way to go.
  #
  # See misc.nix -> versionedDerivation
  # discussion: nixpkgs: pull/310
  mergeAttrsByVersion = name: version: attrsByVersion: base:
    mergeAttrsByFuncDefaultsClean [ { name = "${name}-${version}"; } base (maybeAttr version (throw "bad version ${version} for ${name}") attrsByVersion)];

  # sane defaults (same name as attr name so that inherit can be used)
  mergeAttrBy = # { buildInputs = concatList; [...]; passthru = mergeAttr; [..]; }
    listToAttrs (map (n : nameValuePair n lib.concat)
      [ "nativeBuildInputs" "buildInputs" "propagatedBuildInputs" "configureFlags" "prePhases" "postAll" "patches" ])
    // listToAttrs (map (n : nameValuePair n lib.mergeAttrs) [ "passthru" "meta" "cfg" "flags" ])
    // listToAttrs (map (n : nameValuePair n (a: b: "${a}\n${b}") ) [ "preConfigure" "postInstall" ])
    ;

  # prepareDerivationArgs tries to make writing configurable derivations easier
  # example:
  #  prepareDerivationArgs {
  #    mergeAttrBy = {
  #      myScript = x : y : x ++ "\n" ++ y;
  #    };
  #    cfg = {
  #      readlineSupport = true;
  #    };
  #    flags = {
  #      readline = {
  #        set = {
  #          configureFlags = [ "--with-compiler=${compiler}" ];
  #          buildInputs = [ compiler ];
  #          pass = { inherit compiler; READLINE=1; };
  #          assertion = compiler.dllSupport;
  #          myScript = "foo";
  #        };
  #        unset = { configureFlags = ["--without-compiler"]; };
  #      };
  #    };
  #    src = ...
  #    buildPhase = '' ... '';
  #    name = ...
  #    myScript = "bar";
  #  };
  # if you don't need unset you can omit the surrounding set = { .. } attr
  # all attrs except flags, cfg and mergeAttrBy will be merged with the
  # additional data from flags depending on config settings
  # It's used in composableDerivation in all-packages.nix. It's also used
  # heavily in the new python and libs implementation
  #
  # should we check for misspelled cfg options?
  # TODO use args.mergeFun here as well?
  prepareDerivationArgs = args:
    let args2 = { cfg = {}; flags = {}; } // args;
        flagName = name : "${name}Support";
        cfgWithDefaults = (listToAttrs (map (n : nameValuePair (flagName n) false) (attrNames args2.flags)))
                          // args2.cfg;
        opts = attrValues (mapAttrs (a : v :
          let v2 = if v ? set || v ? unset then v else { set = v; };
              n = if cfgWithDefaults.${flagName a} then "set" else "unset";
              attr = maybeAttr n {} v2; in
          if (maybeAttr "assertion" true attr)
            then attr
            else throw "assertion of flag ${a} of derivation ${args.name} failed"
          ) args2.flags );
    in removeAttrs
      (mergeAttrsByFuncDefaults ([args] ++ opts ++ [{ passthru = cfgWithDefaults; }]))
      ["flags" "cfg" "mergeAttrBy" ];

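  # Sketch of the effect for the example above (readlineSupport = true, all
  # names hypothetical): the "set" branch of the readline flag is merged into
  # the remaining arguments, so the result passed to mkDerivation would roughly
  # contain
  #   configureFlags = [ "--with-compiler=..." ];
  #   buildInputs = [ compiler ];
  #   passthru = { readlineSupport = true; };
  # while the flags, cfg and mergeAttrBy attributes themselves are removed.
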
  nixType = x:
    if isAttrs x then
      if x ? outPath then "derivation"
      else "attrs"
    else if isFunction x then "function"
    else if isList x then "list"
    else if x == true then "bool"
    else if x == false then "bool"
    else if x == null then "null"
    else if isInt x then "int"
    else "string";
}
@@ -1,12 +0,0 @@

# snippets that can be shared by multiple fetchers (pkgs/build-support)
{

  proxyImpureEnvVars = [
    # We borrow these environment variables from the caller to allow
    # easy proxy configuration. This is impure, but a fixed-output
    # derivation like fetchurl is allowed to do so since its result is
    # by definition pure.
    "http_proxy" "https_proxy" "ftp_proxy" "all_proxy" "no_proxy"
  ];

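  # A sketch of how a fetcher might use this list (attribute names assumed):
  #   stdenv.mkDerivation {
  #     outputHashAlgo = "sha256";
  #     outputHash = ...;
  #     impureEnvVars = lib.fetchers.proxyImpureEnvVars;
  #     ...
  #   }
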
}
@@ -1,93 +0,0 @@

/* Functions that generate widespread file
 * formats from nix data structures.
 *
 * They all follow a similar interface:
 * generator { config-attrs } data
 *
 * Tests can be found in ./tests.nix
 * Documentation in the manual, #sec-generators
 */
with import ./trivial.nix;
let
  libStr = import ./strings.nix;
  libAttr = import ./attrsets.nix;

  flipMapAttrs = flip libAttr.mapAttrs;
in

rec {

  /* Generate a line of key k and value v, separated by
   * character sep. If sep appears in k, it is escaped.
   * Helper for syntaxes with different separators.
   *
   * mkKeyValueDefault ":" "f:oo" "bar"
   * > "f\:oo:bar"
   */
  mkKeyValueDefault = sep: k: v:
    "${libStr.escape [sep] k}${sep}${toString v}";

  /* Generate a key-value-style config file from an attrset.
   *
   * mkKeyValue is the same as in toINI.
   */
  toKeyValue = {
    mkKeyValue ? mkKeyValueDefault "="
  }: attrs:
    let mkLine = k: v: mkKeyValue k v + "\n";
    in libStr.concatStrings (libAttr.mapAttrsToList mkLine attrs);

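  /* A short illustration (hypothetical input, not from the original file):
   *
   * toKeyValue {} { foo = "bar"; x = 2; }
   *
   *> foo=bar
   *> x=2
   */
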
  /* Generate an INI-style config file from an
   * attrset of sections to an attrset of key-value pairs.
   *
   * generators.toINI {} {
   *   foo = { hi = "${pkgs.hello}"; ciao = "bar"; };
   *   baz = { "also, integers" = 42; };
   * }
   *
   *> [baz]
   *> also, integers=42
   *>
   *> [foo]
   *> ciao=bar
   *> hi=/nix/store/y93qql1p5ggfnaqjjqhxcw0vqw95rlz0-hello-2.10
   *
   * The mk* configuration attributes can generically change
   * the way sections and key-value strings are generated.
   *
   * For more examples see the test cases in ./tests.nix.
   */
  toINI = {
    # apply transformations (e.g. escapes) to section names
    mkSectionName ? (name: libStr.escape [ "[" "]" ] name),
    # format a setting line from key and value
    mkKeyValue ? mkKeyValueDefault "="
  }: attrsOfAttrs:
    let
      # map function to string for each key val
      mapAttrsToStringsSep = sep: mapFn: attrs:
        libStr.concatStringsSep sep
          (libAttr.mapAttrsToList mapFn attrs);
      mkSection = sectName: sectValues: ''
        [${mkSectionName sectName}]
      '' + toKeyValue { inherit mkKeyValue; } sectValues;
    in
      # map input to ini sections
      mapAttrsToStringsSep "\n" mkSection attrsOfAttrs;

  /* Generates JSON from an arbitrary (non-function) value.
   * For more information see the documentation of the builtin.
   */
  toJSON = {}: builtins.toJSON;

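  /* e.g. (illustrative): toJSON {} { a = [ 1 2 ]; b = null; }
   * evaluates to the string {"a":[1,2],"b":null}
   */
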
  /* YAML has been a strict superset of JSON since 1.2, so we
   * use toJSON. Before it only had a few differences referring
   * to implicit typing rules, so it should work with older
   * parsers as well.
   */
  toYAML = {}@args: toJSON args;
}
117
lib/licenses.nix
@@ -65,11 +65,6 @@ lib.mapAttrs (n: v: v // { shortName = n; }) rec {
|
||||
fullName = "Boost Software License 1.0";
|
||||
};
|
||||
|
||||
beerware = spdx {
|
||||
spdxId = "Beerware";
|
||||
fullName = ''Beerware License'';
|
||||
};
|
||||
|
||||
bsd2 = spdx {
|
||||
spdxId = "BSD-2-Clause";
|
||||
fullName = ''BSD 2-clause "Simplified" License'';
|
||||
@@ -90,36 +85,6 @@ lib.mapAttrs (n: v: v // { shortName = n; }) rec {
|
||||
fullName = "Creative Commons Zero v1.0 Universal";
|
||||
};
|
||||
|
||||
cc-by-nc-sa-20 = spdx {
|
||||
spdxId = "CC-BY-NC-SA-2.0";
|
||||
fullName = "Creative Commons Attribution Non Commercial Share Alike 2.0";
|
||||
};
|
||||
|
||||
cc-by-nc-sa-25 = spdx {
|
||||
spdxId = "CC-BY-NC-SA-2.5";
|
||||
fullName = "Creative Commons Attribution Non Commercial Share Alike 2.5";
|
||||
};
|
||||
|
||||
cc-by-nc-sa-30 = spdx {
|
||||
spdxId = "CC-BY-NC-SA-3.0";
|
||||
fullName = "Creative Commons Attribution Non Commercial Share Alike 3.0";
|
||||
};
|
||||
|
||||
cc-by-nc-sa-40 = spdx {
|
||||
spdxId = "CC-BY-NC-SA-4.0";
|
||||
fullName = "Creative Commons Attribution Non Commercial Share Alike 4.0";
|
||||
};
|
||||
|
||||
cc-by-nd-30 = spdx {
|
||||
spdxId = "CC-BY-ND-3.0";
|
||||
fullName = "Creative Commons Attribution-No Derivative Works v3.00";
|
||||
};
|
||||
|
||||
cc-by-sa-25 = spdx {
|
||||
spdxId = "CC-BY-SA-2.5";
|
||||
fullName = "Creative Commons Attribution Share Alike 2.5";
|
||||
};
|
||||
|
||||
cc-by-30 = spdx {
|
||||
spdxId = "CC-BY-3.0";
|
||||
fullName = "Creative Commons Attribution 3.0";
|
||||
@@ -135,11 +100,6 @@ lib.mapAttrs (n: v: v // { shortName = n; }) rec {
|
||||
fullName = "Creative Commons Attribution 4.0";
|
||||
};
|
||||
|
||||
cc-by-sa-40 = spdx {
|
||||
spdxId = "CC-BY-SA-4.0";
|
||||
fullName = "Creative Commons Attribution Share Alike 4.0";
|
||||
};
|
||||
|
||||
cddl = spdx {
|
||||
spdxId = "CDDL-1.0";
|
||||
fullName = "Common Development and Distribution License 1.0";
|
||||
@@ -165,57 +125,15 @@ lib.mapAttrs (n: v: v // { shortName = n; }) rec {
|
||||
fullName = "Common Public License 1.0";
|
||||
};
|
||||
|
||||
doc = spdx {
|
||||
spdxId = "DOC";
|
||||
fullName = "DOC License";
|
||||
};
|
||||
|
||||
efl10 = spdx {
|
||||
spdxId = "EFL-1.0";
|
||||
fullName = "Eiffel Forum License v1.0";
|
||||
};
|
||||
|
||||
efl20 = spdx {
|
||||
spdxId = "EFL-2.0";
|
||||
fullName = "Eiffel Forum License v2.0";
|
||||
};
|
||||
|
||||
epl10 = spdx {
|
||||
spdxId = "EPL-1.0";
|
||||
fullName = "Eclipse Public License 1.0";
|
||||
};
|
||||
|
||||
epson = {
|
||||
fullName = "Seiko Epson Corporation Software License Agreement for Linux";
|
||||
url = https://download.ebz.epson.net/dsc/du/02/eula/global/LINUX_EN.html;
|
||||
free = false;
|
||||
};
|
||||
|
||||
fdl12 = spdx {
|
||||
spdxId = "GFDL-1.2";
|
||||
fullName = "GNU Free Documentation License v1.2";
|
||||
};
|
||||
|
||||
fdl13 = spdx {
|
||||
spdxId = "GFDL-1.3";
|
||||
fullName = "GNU Free Documentation License v1.3";
|
||||
};
|
||||
|
||||
free = {
|
||||
fullName = "Unspecified free software license";
|
||||
};
|
||||
|
||||
g4sl = {
|
||||
fullName = "Geant4 Software License";
|
||||
url = https://geant4.web.cern.ch/geant4/license/LICENSE.html;
|
||||
};
|
||||
|
||||
geogebra = {
|
||||
fullName = "GeoGebra Non-Commercial License Agreement";
|
||||
url = https://www.geogebra.org/license;
|
||||
free = false;
|
||||
};
|
||||
|
||||
gpl1 = spdx {
|
||||
spdxId = "GPL-1.0";
|
||||
fullName = "GNU General Public License v1.0 only";
|
||||
@@ -267,11 +185,6 @@ lib.mapAttrs (n: v: v // { shortName = n; }) rec {
|
||||
url = http://www.calculate-linux.org/packages/licenses/iASL;
|
||||
};
|
||||
|
||||
ijg = spdx {
|
||||
spdxId = "IJG";
|
||||
fullName = "Independent JPEG Group License";
|
||||
};
|
||||
|
||||
inria = {
|
||||
fullName = "INRIA Non-Commercial License Agreement";
|
||||
url = "http://compcert.inria.fr/doc/LICENSE";
|
||||
@@ -359,11 +272,6 @@ lib.mapAttrs (n: v: v // { shortName = n; }) rec {
|
||||
fullName = "MIT License";
|
||||
};
|
||||
|
||||
mpl10 = spdx {
|
||||
spdxId = "MPL-1.0";
|
||||
fullName = "Mozilla Public License 1.0";
|
||||
};
|
||||
|
||||
mpl11 = spdx {
|
||||
spdxId = "MPL-1.1";
|
||||
fullName = "Mozilla Public License 1.1";
|
||||
@@ -384,21 +292,11 @@ lib.mapAttrs (n: v: v // { shortName = n; }) rec {
|
||||
fullName = "University of Illinois/NCSA Open Source License";
|
||||
};
|
||||
|
||||
notion_lgpl = {
|
||||
url = "https://raw.githubusercontent.com/raboof/notion/master/LICENSE";
|
||||
fullName = "Notion modified LGPL";
|
||||
};
|
||||
|
||||
ofl = spdx {
|
||||
spdxId = "OFL-1.1";
|
||||
fullName = "SIL Open Font License 1.1";
|
||||
};
|
||||
|
||||
openldap = spdx {
|
||||
spdxId = "OLDAP-2.8";
|
||||
fullName = "Open LDAP Public License v2.8";
|
||||
};
|
||||
|
||||
openssl = spdx {
|
||||
spdxId = "OpenSSL";
|
||||
fullName = "OpenSSL License";
|
||||
@@ -454,11 +352,6 @@ lib.mapAttrs (n: v: v // { shortName = n; }) rec {
|
||||
fullName = "TCL/TK License";
|
||||
};
|
||||
|
||||
ufl = {
|
||||
fullName = "Ubuntu Font License 1.0";
|
||||
url = http://font.ubuntu.com/ufl/ubuntu-font-licence-1.0.txt;
|
||||
};
|
||||
|
||||
unfree = {
|
||||
fullName = "Unfree";
|
||||
free = false;
|
||||
@@ -480,16 +373,6 @@ lib.mapAttrs (n: v: v // { shortName = n; }) rec {
|
||||
fullName = "The Unlicense";
|
||||
};
|
||||
|
||||
upl = {
|
||||
fullName = "Universal Permissive License";
|
||||
url = "https://oss.oracle.com/licenses/upl/";
|
||||
};
|
||||
|
||||
vim = spdx {
|
||||
spdxId = "Vim";
|
||||
fullName = "Vim License";
|
||||
};
|
||||
|
||||
vsl10 = spdx {
|
||||
spdxId = "VSL-1.0";
|
||||
fullName = "Vovida Software License v1.0";
|
||||
|
||||
445
lib/lists.nix
@@ -4,28 +4,19 @@ with import ./trivial.nix;
|
||||
|
||||
rec {
|
||||
|
||||
inherit (builtins) head tail length isList elemAt concatLists filter elem genList;
|
||||
inherit (builtins) head tail length isList elemAt concatLists filter elem;
|
||||
|
||||
/* Create a list consisting of a single element. `singleton x' is
|
||||
sometimes more convenient with respect to indentation than `[x]'
|
||||
when x spans multiple lines.
|
||||
|
||||
Example:
|
||||
singleton "foo"
|
||||
=> [ "foo" ]
|
||||
*/
|
||||
# Create a list consisting of a single element. `singleton x' is
|
||||
# sometimes more convenient with respect to indentation than `[x]'
|
||||
# when x spans multiple lines.
|
||||
singleton = x: [x];
|
||||
|
||||
/* "Fold" a binary function `op' between successive elements of
|
||||
`list' with `nul' as the starting value, i.e., `fold op nul [x_1
|
||||
x_2 ... x_n] == op x_1 (op x_2 ... (op x_n nul))'. (This is
|
||||
Haskell's foldr).
|
||||
|
||||
Example:
|
||||
concat = fold (a: b: a + b) "z"
|
||||
concat [ "a" "b" "c" ]
|
||||
=> "abcz"
|
||||
*/
|
||||
# "Fold" a binary function `op' between successive elements of
|
||||
# `list' with `nul' as the starting value, i.e., `fold op nul [x_1
|
||||
# x_2 ... x_n] == op x_1 (op x_2 ... (op x_n nul))'. (This is
|
||||
# Haskell's foldr).
|
||||
fold = op: nul: list:
|
||||
let
|
||||
len = length list;
|
||||
@@ -35,14 +26,8 @@ rec {
|
||||
else op (elemAt list n) (fold' (n + 1));
|
||||
in fold' 0;
|
||||
|
||||
/* Left fold: `fold op nul [x_1 x_2 ... x_n] == op (... (op (op nul
|
||||
x_1) x_2) ... x_n)'.
|
||||
|
||||
Example:
|
||||
lconcat = foldl (a: b: a + b) "z"
|
||||
lconcat [ "a" "b" "c" ]
|
||||
=> "zabc"
|
||||
*/
|
||||
# Left fold: `fold op nul [x_1 x_2 ... x_n] == op (... (op (op nul
|
||||
# x_1) x_2) ... x_n)'.
|
||||
foldl = op: nul: list:
|
||||
let
|
||||
len = length list;
|
||||
@@ -52,301 +37,125 @@ rec {
|
||||
else op (foldl' (n - 1)) (elemAt list n);
|
||||
in foldl' (length list - 1);
|
||||
|
||||
/* Strict version of foldl.
|
||||
|
||||
The difference is that evaluation is forced upon access. Usually used
|
||||
with small whole results (in contrast with lazily-generated lists or large
lists where only a part is consumed).
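
Example (illustrative):
  foldl' (acc: x: acc + x) 0 [ 1 2 3 ]
  => 6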
|
||||
*/
|
||||
foldl' = builtins.foldl' or foldl;
|
||||
# map with index: `imap (i: v: "${v}-${toString i}") ["a" "b"] ==
|
||||
# ["a-1" "b-2"]'
|
||||
imap = f: list:
|
||||
let
|
||||
len = length list;
|
||||
imap' = n:
|
||||
if n == len
|
||||
then []
|
||||
else [ (f (n + 1) (elemAt list n)) ] ++ imap' (n + 1);
|
||||
in imap' 0;
|
||||
|
||||
/* Map with index
|
||||
|
||||
FIXME(zimbatm): why does this start to count at 1?
|
||||
|
||||
Example:
|
||||
imap (i: v: "${v}-${toString i}") ["a" "b"]
|
||||
=> [ "a-1" "b-2" ]
|
||||
*/
|
||||
imap = f: list: genList (n: f (n + 1) (elemAt list n)) (length list);
|
||||
|
||||
/* Map and concatenate the result.
|
||||
|
||||
Example:
|
||||
concatMap (x: [x] ++ ["z"]) ["a" "b"]
|
||||
=> [ "a" "z" "b" "z" ]
|
||||
*/
|
||||
# Map and concatenate the result.
|
||||
concatMap = f: list: concatLists (map f list);
|
||||
|
||||
/* Flatten the argument into a single list; that is, nested lists are
|
||||
spliced into the top-level lists.
|
||||
|
||||
Example:
|
||||
flatten [1 [2 [3] 4] 5]
|
||||
=> [1 2 3 4 5]
|
||||
flatten 1
|
||||
=> [1]
|
||||
*/
|
||||
# Flatten the argument into a single list; that is, nested lists are
|
||||
# spliced into the top-level lists. E.g., `flatten [1 [2 [3] 4] 5]
|
||||
# == [1 2 3 4 5]' and `flatten 1 == [1]'.
|
||||
flatten = x:
|
||||
if isList x
|
||||
then concatMap (y: flatten y) x
|
||||
then fold (x: y: (flatten x) ++ y) [] x
|
||||
else [x];
|
||||
|
||||
/* Remove elements equal to 'e' from a list. Useful for buildInputs.
|
||||
|
||||
Example:
|
||||
remove 3 [ 1 3 4 3 ]
|
||||
=> [ 1 4 ]
|
||||
*/
|
||||
# Remove elements equal to 'e' from a list. Useful for buildInputs.
|
||||
remove = e: filter (x: x != e);
|
||||
|
||||
/* Find the sole element in the list matching the specified
|
||||
predicate, returns `default' if no such element exists, or
|
||||
`multiple' if there are multiple matching elements.
|
||||
|
||||
Example:
|
||||
findSingle (x: x == 3) "none" "multiple" [ 1 3 3 ]
|
||||
=> "multiple"
|
||||
findSingle (x: x == 3) "none" "multiple" [ 1 3 ]
|
||||
=> 3
|
||||
findSingle (x: x == 3) "none" "multiple" [ 1 9 ]
|
||||
=> "none"
|
||||
*/
|
||||
# Find the sole element in the list matching the specified
|
||||
# predicate, returns `default' if no such element exists, or
|
||||
# `multiple' if there are multiple matching elements.
|
||||
findSingle = pred: default: multiple: list:
|
||||
let found = filter pred list; len = length found;
|
||||
in if len == 0 then default
|
||||
else if len != 1 then multiple
|
||||
else head found;
|
||||
|
||||
/* Find the first element in the list matching the specified
|
||||
predicate or returns `default' if no such element exists.
|
||||
|
||||
Example:
|
||||
findFirst (x: x > 3) 7 [ 1 6 4 ]
|
||||
=> 6
|
||||
findFirst (x: x > 9) 7 [ 1 6 4 ]
|
||||
=> 7
|
||||
*/
|
||||
# Find the first element in the list matching the specified
|
||||
# predicate or returns `default' if no such element exists.
|
||||
findFirst = pred: default: list:
|
||||
let found = filter pred list;
|
||||
in if found == [] then default else head found;
|
||||
|
||||
/* Return true iff function `pred' returns true for at least one element
|
||||
of `list'.
|
||||
|
||||
Example:
|
||||
any isString [ 1 "a" { } ]
|
||||
=> true
|
||||
any isString [ 1 { } ]
|
||||
=> false
|
||||
*/
|
||||
any = builtins.any or (pred: fold (x: y: if pred x then true else y) false);
|
||||
# Return true iff function `pred' returns true for at least one element
|
||||
# of `list'.
|
||||
any = pred: fold (x: y: if pred x then true else y) false;
|
||||
|
||||
/* Return true iff function `pred' returns true for all elements of
|
||||
`list'.
|
||||
|
||||
Example:
|
||||
all (x: x < 3) [ 1 2 ]
|
||||
=> true
|
||||
all (x: x < 3) [ 1 2 3 ]
|
||||
=> false
|
||||
*/
|
||||
all = builtins.all or (pred: fold (x: y: if pred x then y else false) true);
|
||||
# Return true iff function `pred' returns true for all elements of
|
||||
# `list'.
|
||||
all = pred: fold (x: y: if pred x then y else false) true;
|
||||
|
||||
/* Count how many times function `pred' returns true for the elements
|
||||
of `list'.
|
||||
|
||||
Example:
|
||||
count (x: x == 3) [ 3 2 3 4 6 ]
|
||||
=> 2
|
||||
*/
|
||||
count = pred: foldl' (c: x: if pred x then c + 1 else c) 0;
|
||||
# Count how many times function `pred' returns true for the elements
|
||||
# of `list'.
|
||||
count = pred: fold (x: c: if pred x then c + 1 else c) 0;
|
||||
|
||||
/* Return a singleton list or an empty list, depending on a boolean
|
||||
value. Useful when building lists with optional elements
|
||||
(e.g. `++ optional (system == "i686-linux") flashplayer').
|
||||
|
||||
Example:
|
||||
optional true "foo"
|
||||
=> [ "foo" ]
|
||||
optional false "foo"
|
||||
=> [ ]
|
||||
*/
|
||||
# Return a singleton list or an empty list, depending on a boolean
|
||||
# value. Useful when building lists with optional elements
|
||||
# (e.g. `++ optional (system == "i686-linux") flashplayer').
|
||||
optional = cond: elem: if cond then [elem] else [];
|
||||
|
||||
/* Return a list or an empty list, depending on a boolean value.
|
||||
|
||||
Example:
|
||||
optionals true [ 2 3 ]
|
||||
=> [ 2 3 ]
|
||||
optionals false [ 2 3 ]
|
||||
=> [ ]
|
||||
*/
|
||||
# Return a list or an empty list, depending on a boolean value.
|
||||
optionals = cond: elems: if cond then elems else [];
|
||||
|
||||
|
||||
/* If argument is a list, return it; else, wrap it in a singleton
|
||||
list. If you're using this, you should almost certainly
|
||||
reconsider if there isn't a more "well-typed" approach.
|
||||
|
||||
Example:
|
||||
toList [ 1 2 ]
|
||||
=> [ 1 2 ]
|
||||
toList "hi"
|
||||
=> [ "hi "]
|
||||
*/
|
||||
# If argument is a list, return it; else, wrap it in a singleton
|
||||
# list. If you're using this, you should almost certainly
|
||||
# reconsider if there isn't a more "well-typed" approach.
|
||||
toList = x: if isList x then x else [x];
|
||||
|
||||
/* Return a list of integers from `first' up to and including `last'.
|
||||
|
||||
Example:
|
||||
range 2 4
|
||||
=> [ 2 3 4 ]
|
||||
range 3 2
|
||||
=> [ ]
|
||||
*/
|
||||
# Return a list of integers from `first' up to and including `last'.
|
||||
range = first: last:
|
||||
if first > last then
|
||||
[]
|
||||
else
|
||||
genList (n: first + n) (last - first + 1);
|
||||
if last < first
|
||||
then []
|
||||
else [first] ++ range (first + 1) last;
|
||||
|
||||
/* Splits the elements of a list in two lists, `right' and
|
||||
`wrong', depending on the evaluation of a predicate.
|
||||
|
||||
Example:
|
||||
partition (x: x > 2) [ 5 1 2 3 4 ]
|
||||
=> { right = [ 5 3 4 ]; wrong = [ 1 2 ]; }
|
||||
*/
|
||||
partition = builtins.partition or (pred:
|
||||
# Partition the elements of a list in two lists, `right' and
|
||||
# `wrong', depending on the evaluation of a predicate.
|
||||
partition = pred:
|
||||
fold (h: t:
|
||||
if pred h
|
||||
then { right = [h] ++ t.right; wrong = t.wrong; }
|
||||
else { right = t.right; wrong = [h] ++ t.wrong; }
|
||||
) { right = []; wrong = []; });
|
||||
) { right = []; wrong = []; };
|
||||
|
||||
/* Merges two lists of the same size together. If the sizes aren't the same
|
||||
the merging stops at the shortest. How both lists are merged is defined
|
||||
by the first argument.
|
||||
|
||||
Example:
|
||||
zipListsWith (a: b: a + b) ["h" "l"] ["e" "o"]
|
||||
=> ["he" "lo"]
|
||||
*/
|
||||
zipListsWith = f: fst: snd:
|
||||
genList
|
||||
(n: f (elemAt fst n) (elemAt snd n)) (min (length fst) (length snd));
|
||||
let
|
||||
len1 = length fst;
|
||||
len2 = length snd;
|
||||
len = if len1 < len2 then len1 else len2;
|
||||
zipListsWith' = n:
|
||||
if n != len then
|
||||
[ (f (elemAt fst n) (elemAt snd n)) ]
|
||||
++ zipListsWith' (n + 1)
|
||||
else [];
|
||||
in zipListsWith' 0;
|
||||
|
||||
/* Merges two lists of the same size together. If the sizes aren't the same
|
||||
the merging stops at the shortest.
|
||||
|
||||
Example:
|
||||
zipLists [ 1 2 ] [ "a" "b" ]
|
||||
=> [ { fst = 1; snd = "a"; } { fst = 2; snd = "b"; } ]
|
||||
*/
|
||||
zipLists = zipListsWith (fst: snd: { inherit fst snd; });
|
||||
|
||||
/* Reverse the order of the elements of a list.
|
||||
|
||||
Example:
|
||||
# Reverse the order of the elements of a list. FIXME: O(n^2)!
|
||||
reverseList = fold (e: acc: acc ++ [ e ]) [];
|
||||
|
||||
reverseList [ "b" "o" "j" ]
|
||||
=> [ "j" "o" "b" ]
|
||||
*/
|
||||
reverseList = xs:
|
||||
let l = length xs; in genList (n: elemAt xs (l - n - 1)) l;
|
||||
|
||||
/* Depth-First Search (DFS) for lists `list != []`.
|
||||
|
||||
`before a b == true` means that `b` depends on `a` (there's an
|
||||
edge from `b` to `a`).
|
||||
|
||||
Examples:
|
||||
|
||||
listDfs true hasPrefix [ "/home/user" "other" "/" "/home" ]
|
||||
== { minimal = "/"; # minimal element
|
||||
visited = [ "/home/user" ]; # seen elements (in reverse order)
|
||||
rest = [ "/home" "other" ]; # everything else
|
||||
}
|
||||
|
||||
listDfs true hasPrefix [ "/home/user" "other" "/" "/home" "/" ]
|
||||
== { cycle = "/"; # cycle encountered at this element
|
||||
loops = [ "/" ]; # and continues to these elements
|
||||
visited = [ "/" "/home/user" ]; # elements leading to the cycle (in reverse order)
|
||||
rest = [ "/home" "other" ]; # everything else
|
||||
|
||||
*/
|
||||
|
||||
listDfs = stopOnCycles: before: list:
|
||||
let
|
||||
dfs' = us: visited: rest:
|
||||
let
|
||||
c = filter (x: before x us) visited;
|
||||
b = partition (x: before x us) rest;
|
||||
in if stopOnCycles && (length c > 0)
|
||||
then { cycle = us; loops = c; inherit visited rest; }
|
||||
else if length b.right == 0
|
||||
then # nothing is before us
|
||||
{ minimal = us; inherit visited rest; }
|
||||
else # grab the first one before us and continue
|
||||
dfs' (head b.right)
|
||||
([ us ] ++ visited)
|
||||
(tail b.right ++ b.wrong);
|
||||
in dfs' (head list) [] (tail list);
|
||||
|
||||
/* Sort a list based on a partial ordering using DFS. This
|
||||
implementation is O(N^2); if your ordering is linear, use `sort`
|
||||
instead.
|
||||
|
||||
`before a b == true` means that `b` should be after `a`
|
||||
in the result.
|
||||
|
||||
Examples:
|
||||
|
||||
toposort hasPrefix [ "/home/user" "other" "/" "/home" ]
|
||||
== { result = [ "/" "/home" "/home/user" "other" ]; }
|
||||
|
||||
toposort hasPrefix [ "/home/user" "other" "/" "/home" "/" ]
|
||||
== { cycle = [ "/home/user" "/" "/" ]; # path leading to a cycle
|
||||
loops = [ "/" ]; } # loops back to these elements
|
||||
|
||||
toposort hasPrefix [ "other" "/home/user" "/home" "/" ]
|
||||
== { result = [ "other" "/" "/home" "/home/user" ]; }
|
||||
|
||||
toposort (a: b: a < b) [ 3 2 1 ] == { result = [ 1 2 3 ]; }
|
||||
|
||||
*/
|
||||
|
||||
toposort = before: list:
|
||||
let
|
||||
dfsthis = listDfs true before list;
|
||||
toporest = toposort before (dfsthis.visited ++ dfsthis.rest);
|
||||
in
|
||||
if length list < 2
|
||||
then # finish
|
||||
{ result = list; }
|
||||
else if dfsthis ? "cycle"
|
||||
then # there's a cycle, starting from the current vertex, return it
|
||||
{ cycle = reverseList ([ dfsthis.cycle ] ++ dfsthis.visited);
|
||||
inherit (dfsthis) loops; }
|
||||
else if toporest ? "cycle"
|
||||
then # there's a cycle somewhere else in the graph, return it
|
||||
toporest
|
||||
# Slow, but short. Can be made a bit faster with an explicit stack.
|
||||
else # there are no cycles
|
||||
{ result = [ dfsthis.minimal ] ++ toporest.result; };
|
||||
|
||||
/* Sort a list based on a comparator function which compares two
|
||||
elements and returns true if the first argument is strictly below
|
||||
the second argument. The returned list is sorted in an increasing
|
||||
order. The implementation does a quick-sort.
|
||||
|
||||
Example:
|
||||
sort (a: b: a < b) [ 5 3 7 ]
|
||||
=> [ 3 5 7 ]
|
||||
*/
|
||||
sort = builtins.sort or (
|
||||
strictLess: list:
|
||||
# Sort a list based on a comparator function which compares two
|
||||
# elements and returns true if the first argument is strictly below
|
||||
# the second argument. The returned list is sorted in an increasing
|
||||
# order. The implementation does a quick-sort.
|
||||
sort = strictLess: list:
|
||||
let
|
||||
len = length list;
|
||||
first = head list;
|
||||
@@ -360,75 +169,61 @@ rec {
|
||||
pivot = pivot' 1 { left = []; right = []; };
|
||||
in
|
||||
if len < 2 then list
|
||||
else (sort strictLess pivot.left) ++ [ first ] ++ (sort strictLess pivot.right));
|
||||
else (sort strictLess pivot.left) ++ [ first ] ++ (sort strictLess pivot.right);
|
||||
|
||||
/* Return the first (at most) N elements of a list.
|
||||
|
||||
Example:
|
||||
take 2 [ "a" "b" "c" "d" ]
|
||||
=> [ "a" "b" ]
|
||||
take 2 [ ]
|
||||
=> [ ]
|
||||
*/
|
||||
take = count: sublist 0 count;
|
||||
# Return the first (at most) N elements of a list.
|
||||
take = count: list:
|
||||
let
|
||||
len = length list;
|
||||
take' = n:
|
||||
if n == len || n == count
|
||||
then []
|
||||
else
|
||||
[ (elemAt list n) ] ++ take' (n + 1);
|
||||
in take' 0;
|
||||
|
||||
/* Remove the first (at most) N elements of a list.
|
||||
|
||||
Example:
|
||||
drop 2 [ "a" "b" "c" "d" ]
|
||||
=> [ "c" "d" ]
|
||||
drop 2 [ ]
|
||||
=> [ ]
|
||||
*/
|
||||
drop = count: list: sublist count (length list) list;
|
||||
# Remove the first (at most) N elements of a list.
|
||||
drop = count: list:
|
||||
let
|
||||
len = length list;
|
||||
drop' = n:
|
||||
if n == -1 || n < count
|
||||
then []
|
||||
else
|
||||
drop' (n - 1) ++ [ (elemAt list n) ];
|
||||
in drop' (len - 1);
|
||||
|
||||
/* Return a list consisting of at most ‘count’ elements of ‘list’,
|
||||
starting at index ‘start’.
|
||||
|
||||
Example:
|
||||
sublist 1 3 [ "a" "b" "c" "d" "e" ]
|
||||
=> [ "b" "c" "d" ]
|
||||
sublist 1 3 [ ]
|
||||
=> [ ]
|
||||
*/
|
||||
sublist = start: count: list:
|
||||
let len = length list; in
|
||||
genList
|
||||
(n: elemAt list (n + start))
|
||||
(if start >= len then 0
|
||||
else if start + count > len then len - start
|
||||
else count);
|
||||
|
||||
/* Return the last element of a list.
|
||||
|
||||
Example:
|
||||
last [ 1 2 3 ]
|
||||
=> 3
|
||||
*/
|
||||
# Return the last element of a list.
|
||||
last = list:
|
||||
assert list != []; elemAt list (length list - 1);
|
||||
|
||||
/* Return all elements but the last
|
||||
|
||||
Example:
|
||||
init [ 1 2 3 ]
|
||||
=> [ 1 2 ]
|
||||
*/
|
||||
# Return all elements but the last
|
||||
init = list: assert list != []; take (length list - 1) list;
|
||||
|
||||
|
||||
/* FIXME(zimbatm) Not used anywhere
|
||||
*/
|
||||
# Zip two lists together.
|
||||
zipTwoLists = xs: ys:
|
||||
let
|
||||
len1 = length xs;
|
||||
len2 = length ys;
|
||||
len = if len1 < len2 then len1 else len2;
|
||||
zipTwoLists' = n:
|
||||
if n != len then
|
||||
[ { first = elemAt xs n; second = elemAt ys n; } ]
|
||||
++ zipTwoLists' (n + 1)
|
||||
else [];
|
||||
in zipTwoLists' 0;
|
||||
|
||||
|
||||
deepSeqList = xs: y: if any (x: deepSeq x false) xs then y else y;
|
||||
|
||||
crossLists = f: foldl (fs: args: concatMap (f: map f args) fs) [f];
|
||||
|
||||
|
||||
/* Remove duplicate elements from the list. O(n^2) complexity.
|
||||
|
||||
Example:
|
||||
|
||||
unique [ 3 2 3 4 ]
|
||||
=> [ 3 2 4 ]
|
||||
*/
|
||||
# Remove duplicate elements from the list
|
||||
unique = list:
|
||||
if list == [] then
|
||||
[]
|
||||
@@ -438,20 +233,4 @@ rec {
|
||||
xs = unique (drop 1 list);
|
||||
in [x] ++ remove x xs;
|
||||
|
||||
/* Intersects list 'e' and another list. O(nm) complexity.
|
||||
|
||||
Example:
|
||||
intersectLists [ 1 2 3 ] [ 6 3 2 ]
|
||||
=> [ 3 2 ]
|
||||
*/
|
||||
intersectLists = e: filter (x: elem x e);
|
||||
|
||||
/* Subtracts list 'e' from another list. O(nm) complexity.
|
||||
|
||||
Example:
|
||||
subtractLists [ 3 2 ] [ 1 2 3 4 5 3 ]
|
||||
=> [ 1 4 5 ]
|
||||
*/
|
||||
subtractLists = e: filter (x: !(elem x e));
|
||||
|
||||
}
|
||||
|
||||
@@ -1,69 +1,33 @@
|
||||
/* List of NixOS maintainers. The format is:
|
||||
/* -*- coding: utf-8; -*- */
|
||||
|
||||
handle = "Real Name <address@example.org>";
|
||||
|
||||
where <handle> is preferred to be your GitHub username (so it's easy
|
||||
to ping a package @<handle>), and <Real Name> is your real name, not
|
||||
a pseudonym. Please keep the list alphabetically sorted. */
|
||||
{
|
||||
a1russell = "Adam Russell <adamlr6+pub@gmail.com>";
|
||||
aaronschif = "Aaron Schif <aaronschif@gmail.com>";
|
||||
abaldeau = "Andreas Baldeau <andreas@baldeau.net>";
|
||||
/* Add your name and email address here. Keep the list
|
||||
alphabetically sorted. */
|
||||
|
||||
_1126 = "Christian Lask <mail@elfsechsundzwanzig.de>";
|
||||
abbradar = "Nikolay Amiantov <ab@fmap.me>";
|
||||
abigailbuccaneer = "Abigail Bunyan <abigailbuccaneer@gmail.com>";
|
||||
aboseley = "Adam Boseley <adam.boseley@gmail.com>";
|
||||
abuibrahim = "Ruslan Babayev <ruslan@babayev.com>";
|
||||
acowley = "Anthony Cowley <acowley@gmail.com>";
|
||||
adev = "Adrien Devresse <adev@adev.name>";
|
||||
Adjective-Object = "Maxwell Huang-Hobbs <mhuan13@gmail.com>";
|
||||
adnelson = "Allen Nelson <ithinkican@gmail.com>";
|
||||
adolfogc = "Adolfo E. García Castro <adolfo.garcia.cr@gmail.com>";
|
||||
aespinosa = "Allan Espinosa <allan.espinosa@outlook.com>";
|
||||
aflatter = "Alexander Flatter <flatter@fastmail.fm>";
|
||||
aforemny = "Alexander Foremny <alexanderforemny@googlemail.com>";
|
||||
afranchuk = "Alex Franchuk <alex.franchuk@gmail.com>";
|
||||
aherrmann = "Andreas Herrmann <andreash87@gmx.ch>";
|
||||
ak = "Alexander Kjeldaas <ak@formalprivacy.com>";
|
||||
akaWolf = "Artjom Vejsel <akawolf0@gmail.com>";
|
||||
akc = "Anders Claesson <akc@akc.is>";
|
||||
algorith = "Dries Van Daele <dries_van_daele@telenet.be>";
|
||||
all = "Nix Committers <nix-commits@lists.science.uu.nl>";
|
||||
ambrop72 = "Ambroz Bizjak <ambrop7@gmail.com>";
|
||||
amiddelk = "Arie Middelkoop <amiddelk@gmail.com>";
|
||||
amiloradovsky = "Andrew Miloradovsky <miloradovsky@gmail.com>";
|
||||
amorsillo = "Andrew Morsillo <andrew.morsillo@gmail.com>";
|
||||
AndersonTorres = "Anderson Torres <torres.anderson.85@gmail.com>";
|
||||
anderspapitto = "Anders Papitto <anderspapitto@gmail.com>";
|
||||
andres = "Andres Loeh <ksnixos@andres-loeh.de>";
|
||||
andrewrk = "Andrew Kelley <superjoe30@gmail.com>";
|
||||
andsild = "Anders Sildnes <andsild@gmail.com>";
|
||||
aneeshusa = "Aneesh Agrawal <aneeshusa@gmail.com>";
|
||||
antono = "Antono Vasiljev <self@antono.info>";
|
||||
apeyroux = "Alexandre Peyroux <alex@px.io>";
|
||||
ardumont = "Antoine R. Dumont <eniotna.t@gmail.com>";
|
||||
aristid = "Aristid Breitkreuz <aristidb@gmail.com>";
|
||||
arobyn = "Alexei Robyn <shados@shados.net>";
|
||||
artuuge = "Artur E. Ruuge <artuuge@gmail.com>";
|
||||
ashalkhakov = "Artyom Shalkhakov <artyom.shalkhakov@gmail.com>";
|
||||
aske = "Kirill Boltaev <aske@fmap.me>";
|
||||
asppsa = "Alastair Pharo <asppsa@gmail.com>";
|
||||
astsmtl = "Alexander Tsamutali <astsmtl@yandex.ru>";
|
||||
asymmetric = "Lorenzo Manacorda <lorenzo@mailbox.org>";
|
||||
aszlig = "aszlig <aszlig@redmoonstudios.org>";
|
||||
auntie = "Jonathan Glines <auntieNeo@gmail.com>";
|
||||
avnik = "Alexander V. Nikolaev <avn@avnik.info>";
|
||||
aycanirican = "Aycan iRiCAN <iricanaycan@gmail.com>";
|
||||
bachp = "Pascal Bach <pascal.bach@nextrem.ch>";
|
||||
badi = "Badi' Abdul-Wahid <abdulwahidc@gmail.com>";
|
||||
balajisivaraman = "Balaji Sivaraman<sivaraman.balaji@gmail.com>";
|
||||
Baughn = "Svein Ove Aas <sveina@gmail.com>";
|
||||
bbenoist = "Baptist BENOIST <return_0@live.com>";
|
||||
bcarrell = "Brandon Carrell <brandoncarrell@gmail.com>";
|
||||
bcdarwin = "Ben Darwin <bcdarwin@gmail.com>";
|
||||
bdimcheff = "Brandon Dimcheff <brandon@dimcheff.com>";
|
||||
benley = "Benjamin Staffin <benley@gmail.com>";
|
||||
bennofs = "Benno Fünfstück <benno.fuenfstueck@gmail.com>";
|
||||
benwbooth = "Ben Booth <benwbooth@gmail.com>";
|
||||
berdario = "Dario Bertini <berdario@gmail.com>";
|
||||
bergey = "Daniel Bergey <bergey@teallabs.org>";
|
||||
bjg = "Brian Gough <bjg@gnu.org>";
|
||||
@@ -71,439 +35,159 @@
|
||||
bluescreen303 = "Mathijs Kwik <mathijs@bluescreen303.nl>";
|
||||
bobvanderlinden = "Bob van der Linden <bobvanderlinden@gmail.com>";
|
||||
bodil = "Bodil Stokke <nix@bodil.org>";
|
||||
boothead = "Ben Ford <ben@perurbis.com>";
|
||||
bosu = "Boris Sukholitko <boriss@gmail.com>";
|
||||
bradediger = "Brad Ediger <brad@bradediger.com>";
|
||||
bramd = "Bram Duvigneau <bram@bramd.nl>";
|
||||
bstrik = "Berno Strik <dutchman55@gmx.com>";
|
||||
bzizou = "Bruno Bzeznik <Bruno@bzizou.net>";
|
||||
c0dehero = "CodeHero <codehero@nerdpol.ch>";
|
||||
calrama = "Moritz Maxeiner <moritz@ucworks.org>";
|
||||
campadrenalin = "Philip Horger <campadrenalin@gmail.com>";
|
||||
carlsverre = "Carl Sverre <accounts@carlsverre.com>";
|
||||
cdepillabout = "Dennis Gosnell <cdep.illabout@gmail.com>";
|
||||
cfouche = "Chaddaï Fouché <chaddai.fouche@gmail.com>";
|
||||
chaoflow = "Florian Friesdorf <flo@chaoflow.net>";
|
||||
chattered = "Phil Scott <me@philscotted.com>";
|
||||
choochootrain = "Hurshal Patel <hurshal@imap.cc>";
|
||||
chris-martin = "Chris Martin <ch.martin@gmail.com>";
|
||||
chrisjefferson = "Christopher Jefferson <chris@bubblescope.net>";
|
||||
christopherpoole = "Christopher Mark Poole <mail@christopherpoole.net>";
|
||||
ckampka = "Christian Kampka <christian@kampka.net>";
|
||||
cko = "Christine Koppelt <christine.koppelt@gmail.com>";
|
||||
cleverca22 = "Michael Bishop <cleverca22@gmail.com>";
|
||||
cmcdragonkai = "Roger Qiu <roger.qiu@matrix.ai>";
|
||||
cmfwyp = "cmfwyp <cmfwyp@riseup.net>";
|
||||
coconnor = "Corey O'Connor <coreyoconnor@gmail.com>";
|
||||
codsl = "codsl <codsl@riseup.net>";
|
||||
codyopel = "Cody Opel <codyopel@gmail.com>";
|
||||
colemickens = "Cole Mickens <cole.mickens@gmail.com>";
|
||||
copumpkin = "Dan Peebles <pumpkingod@gmail.com>";
|
||||
corngood = "David McFarland <corngood@gmail.com>";
|
||||
coroa = "Jonas Hörsch <jonas@chaoflow.net>";
|
||||
couchemar = "Andrey Pavlov <couchemar@yandex.ru>";
|
||||
cransom = "Casey Ransom <cransom@hubns.net>";
|
||||
cryptix = "Henry Bubert <cryptix@riseup.net>";
|
||||
CrystalGamma = "Jona Stubbe <nixos@crystalgamma.de>";
|
||||
cstrahan = "Charles Strahan <charles@cstrahan.com>";
|
||||
cwoac = "Oliver Matthews <oliver@codersoffortune.net>";
|
||||
DamienCassou = "Damien Cassou <damien@cassou.me>";
|
||||
danbst = "Danylo Hlynskyi <abcz2.uprola@gmail.com>";
|
||||
dasuxullebt = "Christoph-Simon Senjak <christoph.senjak@googlemail.com>";
|
||||
davidak = "David Kleuker <post@davidak.de>";
|
||||
cstrahan = "Charles Strahan <charles.c.strahan@gmail.com>";
|
||||
DamienCassou = "Damien Cassou <damien.cassou@gmail.com>";
|
||||
davidrusu = "David Rusu <davidrusu.me@gmail.com>";
|
||||
davorb = "Davor Babic <davor@davor.se>";
|
||||
dbohdan = "Danyil Bohdan <danyil.bohdan@gmail.com>";
|
||||
dbrock = "Daniel Brockman <daniel@brockman.se>";
|
||||
deepfire = "Kosyrev Serge <_deepfire@feelingofgreen.ru>";
|
||||
demin-dmitriy = "Dmitriy Demin <demindf@gmail.com>";
|
||||
DerGuteMoritz = "Moritz Heidkamp <moritz@twoticketsplease.de>";
|
||||
DerTim1 = "Tim Digel <tim.digel@active-group.de>";
|
||||
desiderius = "Didier J. Devroye <didier@devroye.name>";
|
||||
devhell = "devhell <\"^\"@regexmail.net>";
|
||||
dezgeg = "Tuomas Tynkkynen <tuomas.tynkkynen@iki.fi>";
|
||||
dfoxfranke = "Daniel Fox Franke <dfoxfranke@gmail.com>";
|
||||
dgonyeo = "Derek Gonyeo <derek@gonyeo.com>";
|
||||
dipinhora = "Dipin Hora <dipinhora+github@gmail.com>";
|
||||
dmalikov = "Dmitry Malikov <malikov.d.y@gmail.com>";
|
||||
dochang = "Desmond O. Chang <dochang@gmail.com>";
|
||||
domenkozar = "Domen Kozar <domen@dev.si>";
|
||||
doublec = "Chris Double <chris.double@double.co.nz>";
|
||||
drets = "Dmytro Rets <dmitryrets@gmail.com>";
|
||||
drewkett = "Andrew Burkett <burkett.andrew@gmail.com>";
|
||||
dtzWill = "Will Dietz <nix@wdtz.org>";
|
||||
e-user = "Alexander Kahl <nixos@sodosopa.io>";
|
||||
ebzzry = "Rommel Martinez <ebzzry@gmail.com>";
|
||||
ederoyd46 = "Matthew Brown <matt@ederoyd.co.uk>";
|
||||
eduarrrd = "Eduard Bachmakov <e.bachmakov@gmail.com>";
|
||||
edwtjo = "Edward Tjörnhammar <ed@cflags.cc>";
|
||||
eelco = "Eelco Dolstra <eelco.dolstra@logicblox.com>";
|
||||
ehegnes = "Eric Hegnes <eric.hegnes@gmail.com>";
|
||||
ehmry = "Emery Hemingway <emery@vfemail.net>";
|
||||
eikek = "Eike Kettner <eike.kettner@posteo.de>";
|
||||
elasticdog = "Aaron Bull Schaefer <aaron@elasticdog.com>";
|
||||
eleanor = "Dejan Lukan <dejan@proteansec.com>";
|
||||
elitak = "Eric Litak <elitak@gmail.com>";
|
||||
ellis = "Ellis Whitehead <nixos@ellisw.net>";
|
||||
epitrochoid = "Mabry Cervin <mpcervin@uncg.edu>";
|
||||
ericbmerritt = "Eric Merritt <eric@afiniate.com>";
|
||||
ericsagnes = "Eric Sagnes <eric.sagnes@gmail.com>";
|
||||
erikryb = "Erik Rybakken <erik.rybakken@math.ntnu.no>";
|
||||
ertes = "Ertugrul Söylemez <esz@posteo.de>";
|
||||
ethercrow = "Dmitry Ivanov <ethercrow@gmail.com>";
|
||||
exi = "Reno Reckling <nixos@reckling.org>";
|
||||
emery = "Emery Hemingway <emery@vfemail.net>";
|
||||
ertes = "Ertugrul Söylemez <ertesx@gmx.de>";
|
||||
exlevan = "Alexey Levan <exlevan@gmail.com>";
|
||||
expipiplus1 = "Joe Hermaszewski <nix@monoid.al>";
|
||||
fadenb = "Tristan Helmich <tristan.helmich+nixos@gmail.com>";
|
||||
falsifian = "James Cook <james.cook@utoronto.ca>";
|
||||
flosse = "Markus Kohlhase <mail@markus-kohlhase.de>";
|
||||
fluffynukeit = "Daniel Austin <dan@fluffynukeit.com>";
|
||||
fmthoma = "Franz Thoma <f.m.thoma@googlemail.com>";
|
||||
forkk = "Andrew Okin <forkk@forkk.net>";
|
||||
fornever = "Friedrich von Never <friedrich@fornever.me>";
|
||||
fpletz = "Franz Pletz <fpletz@fnordicwalking.de>";
|
||||
fps = "Florian Paul Schmidt <mista.tapas@gmx.net>";
|
||||
fridh = "Frederik Rietdijk <fridh@fridh.nl>";
|
||||
frlan = "Frank Lanitz <frank@frank.uvena.de>";
|
||||
fro_ozen = "fro_ozen <fro_ozen@gmx.de>";
|
||||
ftrvxmtrx = "Siarhei Zirukin <ftrvxmtrx@gmail.com>";
|
||||
funfunctor = "Edward O'Callaghan <eocallaghan@alterapraxis.com>";
|
||||
fuuzetsu = "Mateusz Kowalczyk <fuuzetsu@fuuzetsu.co.uk>";
|
||||
fxfactorial = "Edgar Aroutiounian <edgar.factorial@gmail.com>";
|
||||
gal_bolle = "Florent Becker <florent.becker@ens-lyon.org>";
|
||||
garbas = "Rok Garbas <rok@garbas.si>";
|
||||
garrison = "Jim Garrison <jim@garrison.cc>";
|
||||
gavin = "Gavin Rogers <gavin@praxeology.co.uk>";
|
||||
gebner = "Gabriel Ebner <gebner@gebner.org>";
|
||||
gilligan = "Tobias Pflug <tobias.pflug@gmail.com>";
|
||||
giogadi = "Luis G. Torres <lgtorres42@gmail.com>";
|
||||
gleber = "Gleb Peregud <gleber.p@gmail.com>";
|
||||
globin = "Robin Gloster <mail@glob.in>";
|
||||
gnidorah = "Alex Ivanov <yourbestfriend@opmbx.org>";
|
||||
goibhniu = "Cillian de Róiste <cillian.deroiste@gmail.com>";
|
||||
Gonzih = "Max Gonzih <gonzih@gmail.com>";
|
||||
goodrone = "Andrew Trachenko <goodrone@gmail.com>";
|
||||
gpyh = "Yacine Hmito <yacine.hmito@gmail.com>";
|
||||
grahamc = "Graham Christensen <graham@grahamc.com>";
|
||||
gridaphobe = "Eric Seidel <eric@seidel.io>";
|
||||
guibert = "David Guibert <david.guibert@gmail.com>";
|
||||
guillaumekoenig = "Guillaume Koenig <guillaume.edward.koenig@gmail.com>";
|
||||
guyonvarch = "Joris Guyonvarch <joris@guyonvarch.me>";
|
||||
hakuch = "Jesse Haber-Kucharsky <hakuch@gmail.com>";
|
||||
havvy = "Ryan Scheel <ryan.havvy@gmail.com>";
|
||||
hbunke = "Hendrik Bunke <bunke.hendrik@gmail.com>";
|
||||
hce = "Hans-Christian Esperer <hc@hcesperer.org>";
|
||||
henrytill = "Henry Till <henrytill@gmail.com>";
|
||||
hinton = "Tom Hinton <t@larkery.com>";
|
||||
hrdinka = "Christoph Hrdinka <c.nix@hrdinka.at>";
|
||||
iand675 = "Ian Duncan <ian@iankduncan.com>";
|
||||
ianwookim = "Ian-Woo Kim <ianwookim@gmail.com>";
|
||||
igsha = "Igor Sharonov <igor.sharonov@gmail.com>";
|
||||
ikervagyok = "Balázs Lengyel <ikervagyok@gmail.com>";
|
||||
j-keck = "Jürgen Keck <jhyphenkeck@gmail.com>";
|
||||
iElectric = "Domen Kozar <domen@dev.si>";
|
||||
iyzsong = "Song Wenwu <iyzsong@gmail.com>";
|
||||
jagajaga = "Arseniy Seroka <ars.seroka@gmail.com>";
|
||||
javaguirre = "Javier Aguirre <contacto@javaguirre.net>";
|
||||
jb55 = "William Casarin <bill@casarin.me>";
|
||||
jbedo = "Justin Bedő <cu@cua0.org>";
|
||||
jcumming = "Jack Cummings <jack@mudshark.org>";
|
||||
jdagilliland = "Jason Gilliland <jdagilliland@gmail.com>";
|
||||
jefdaj = "Jeffrey David Johnson <jefdaj@gmail.com>";
|
||||
jerith666 = "Matt McHenry <github@matt.mchenryfamily.org>";
|
||||
jfb = "James Felix Black <james@yamtime.com>";
|
||||
jgeerds = "Jascha Geerds <jascha@jgeerds.name>";
|
||||
jgillich = "Jakob Gillich <jakob@gillich.me>";
|
||||
jgeerds = "Jascha Geerds <jg@ekby.de>";
|
||||
jirkamarsik = "Jirka Marsik <jiri.marsik89@gmail.com>";
|
||||
joachifm = "Joachim Fasting <joachifm@fastmail.fm>";
|
||||
joamaki = "Jussi Maki <joamaki@gmail.com>";
|
||||
joelmo = "Joel Moberg <joel.moberg@gmail.com>";
|
||||
joelteon = "Joel Taylor <me@joelt.io>";
|
||||
joko = "Ioannis Koutras <ioannis.koutras@gmail.com>";
|
||||
jonafato = "Jon Banafato <jon@jonafato.com>";
|
||||
jpbernardy = "Jean-Philippe Bernardy <jeanphilippe.bernardy@gmail.com>";
|
||||
jraygauthier = "Raymond Gauthier <jraygauthier@gmail.com>";
|
||||
juliendehos = "Julien Dehos <dehos@lisic.univ-littoral.fr>";
|
||||
jwiegley = "John Wiegley <johnw@newartisans.com>";
|
||||
jwilberding = "Jordan Wilberding <jwilberding@afiniate.com>";
|
||||
jzellner = "Jeff Zellner <jeffz@eml.cc>";
|
||||
kaiha = "Kai Harries <kai.harries@gmail.com>";
|
||||
kamilchm = "Kamil Chmielewski <kamil.chm@gmail.com>";
|
||||
kampfschlaefer = "Arnold Krille <arnold@arnoldarts.de>";
|
||||
kevincox = "Kevin Cox <kevincox@kevincox.ca>";
|
||||
khumba = "Bryan Gardiner <bog@khumba.net>";
|
||||
KibaFox = "Kiba Fox <kiba.fox@foxypossibilities.com>";
|
||||
kierdavis = "Kier Davis <kierdavis@gmail.com>";
|
||||
kkallio = "Karn Kallio <tierpluspluslists@gmail.com>";
|
||||
koral = "Koral <koral@mailoo.org>";
|
||||
kovirobi = "Kovacsics Robert <kovirobi@gmail.com>";
|
||||
kragniz = "Louis Taylor <louis@kragniz.eu>";
|
||||
kragniz = "Louis Taylor <kragniz@gmail.com>";
|
||||
ktosiek = "Tomasz Kontusz <tomasz.kontusz@gmail.com>";
|
||||
lassulus = "Lassulus <lassulus@gmail.com>";
|
||||
layus = "Guillaume Maudoux <layus.on@gmail.com>";
|
||||
ldesgoui = "Lucas Desgouilles <ldesgoui@gmail.com>";
|
||||
lebastr = "Alexander Lebedev <lebastr@gmail.com>";
|
||||
leenaars = "Michiel Leenaars <ml.software@leenaa.rs>";
|
||||
leonardoce = "Leonardo Cecchi <leonardo.cecchi@gmail.com>";
|
||||
lethalman = "Luca Bruno <lucabru@src.gnome.org>";
|
||||
lewo = "Antoine Eiche <lewo@abesis.fr>";
|
||||
lheckemann = "Linus Heckemann <git@sphalerite.org>";
|
||||
lhvwb = "Nathaniel Baxter <nathaniel.baxter@gmail.com>";
|
||||
lihop = "Leroy Hopson <nixos@leroy.geek.nz>";
|
||||
linquize = "Linquize <linquize@yahoo.com.hk>";
|
||||
linus = "Linus Arver <linusarver@gmail.com>";
|
||||
lnl7 = "Daiderd Jordan <daiderd@gmail.com>";
|
||||
loskutov = "Ignat Loskutov <ignat.loskutov@gmail.com>";
|
||||
lovek323 = "Jason O'Conal <jason@oconal.id.au>";
|
||||
lowfatcomputing = "Andreas Wagner <andreas.wagner@lowfatcomputing.org>";
|
||||
lsix = "Lancelot SIX <lsix@lancelotsix.com>";
|
||||
lucas8 = "Luc Chabassier <luc.linux@mailoo.org>";
|
||||
ludo = "Ludovic Courtès <ludo@gnu.org>";
|
||||
luispedro = "Luis Pedro Coelho <luis@luispedro.org>";
|
||||
lukego = "Luke Gorrie <luke@snabb.co>";
|
||||
lw = "Sergey Sofeychuk <lw@fmap.me>";
|
||||
madjar = "Georges Dubus <georges.dubus@compiletoi.net>";
|
||||
magnetophon = "Bart Brouns <bart@magnetophon.nl>";
|
||||
mahe = "Matthias Herrmann <matthias.mh.herrmann@gmail.com>";
|
||||
makefu = "Felix Richter <makefu@syntax-fehler.de>";
|
||||
malyn = "Michael Alyn Miller <malyn@strangeGizmo.com>";
|
||||
manveru = "Michael Fellinger <m.fellinger@gmail.com>";
|
||||
marcweber = "Marc Weber <marco-oweber@gmx.de>";
|
||||
markus1189 = "Markus Hauck <markus1189@gmail.com>";
|
||||
markWot = "Markus Wotringer <markus@wotringer.de>";
|
||||
martijnvermaat = "Martijn Vermaat <martijn@vermaat.name>";
|
||||
martingms = "Martin Gammelsæter <martin@mg.am>";
|
||||
matejc = "Matej Cotman <cotman.matej@gmail.com>";
|
||||
mathnerd314 = "Mathnerd314 <mathnerd314.gph+hs@gmail.com>";
|
||||
matthewbauer = "Matthew Bauer <mjbauer95@gmail.com>";
|
||||
matthiasbeyer = "Matthias Beyer <mail@beyermatthias.de>";
|
||||
maurer = "Matthew Maurer <matthew.r.maurer+nix@gmail.com>";
|
||||
mbakke = "Marius Bakke <mbakke@fastmail.com>";
|
||||
mbbx6spp = "Susan Potter <me@susanpotter.net>";
|
||||
mbe = "Brandon Edens <brandonedens@gmail.com>";
|
||||
mboes = "Mathieu Boespflug <mboes@tweag.net>";
|
||||
mcmtroffaes = "Matthias C. M. Troffaes <matthias.troffaes@gmail.com>";
|
||||
mdaiter = "Matthew S. Daiter <mdaiter8121@gmail.com>";
|
||||
meditans = "Carlo Nucera <meditans@gmail.com>";
|
||||
meisternu = "Matt Miemiec <meister@krutt.org>";
|
||||
mguentner = "Maximilian Güntner <code@klandest.in>";
|
||||
mic92 = "Jörg Thalheim <joerg@higgsboson.tk>";
|
||||
michaelpj = "Michael Peyton Jones <michaelpj@gmail.com>";
|
||||
michalrus = "Michal Rus <m@michalrus.com>";
|
||||
michelk = "Michel Kuhlmann <michel@kuhlmanns.info>";
|
||||
mikefaille = "Michaël Faille <michael@faille.io>";
|
||||
mimadrid = "Miguel Madrid <mimadrid@ucm.es>";
|
||||
mingchuan = "Ming Chuan <ming@culpring.com>";
|
||||
mirdhyn = "Merlin Gaillard <mirdhyn@gmail.com>";
|
||||
mirrexagon = "Andrew Abbott <mirrexagon@mirrexagon.com>";
|
||||
mjanczyk = "Marcin Janczyk <m@dragonvr.pl>";
|
||||
mlieberman85 = "Michael Lieberman <mlieberman85@gmail.com>";
|
||||
modulistic = "Pablo Costa <modulistic@gmail.com>";
|
||||
mog = "Matthew O'Gorman <mog-lists@rldn.net>";
|
||||
montag451 = "montag451 <montag451@laposte.net>";
|
||||
moosingin3space = "Nathan Moos <moosingin3space@gmail.com>";
|
||||
moretea = "Maarten Hoogendoorn <maarten@moretea.nl>";
|
||||
mornfall = "Petr Ročkai <me@mornfall.net>";
|
||||
MostAwesomeDude = "Corbin Simpson <cds@corbinsimpson.com>";
|
||||
mounium = "Katona László <muoniurn@gmail.com>";
|
||||
MP2E = "Cray Elliott <MP2E@archlinux.us>";
|
||||
mpscholten = "Marc Scholten <marc@mpscholten.de>";
|
||||
mpsyco = "Francis St-Amour <fr.st-amour@gmail.com>";
|
||||
msackman = "Matthew Sackman <matthew@wellquite.org>";
|
||||
mschristiansen = "Mikkel Christiansen <mikkel@rheosystems.com>";
|
||||
msteen = "Matthijs Steen <emailmatthijs@gmail.com>";
|
||||
mtreskin = "Max Treskin <zerthurd@gmail.com>";
|
||||
mudri = "James Wood <lamudri@gmail.com>";
|
||||
muflax = "Stefan Dorn <mail@muflax.com>";
|
||||
myrl = "Myrl Hex <myrl.0xf@gmail.com>";
|
||||
nand0p = "Fernando Jose Pando <nando@hex7.com>";
|
||||
Nate-Devv = "Nathan Moore <natedevv@gmail.com>";
|
||||
nathan-gs = "Nathan Bijnens <nathan@nathan.gs>";
|
||||
nckx = "Tobias Geerinckx-Rice <tobias.geerinckx.rice@gmail.com>";
|
||||
nequissimus = "Tim Steinbach <tim@nequissimus.com>";
|
||||
nfjinjing = "Jinjing Wang <nfjinjing@gmail.com>";
|
||||
nhooyr = "Anmol Sethi <anmol@aubble.com>";
|
||||
nicknovitski = "Nick Novitski <nixpkgs@nicknovitski.com>";
|
||||
nico202 = "Nicolò Balzarotti <anothersms@gmail.com>";
|
||||
NikolaMandic = "Ratko Mladic <nikola@mandic.email>";
|
||||
notthemessiah = "Brian Cohen <brian.cohen.88@gmail.com>";
|
||||
np = "Nicolas Pouillard <np.nix@nicolaspouillard.fr>";
|
||||
nslqqq = "Nikita Mikhailov <nslqqq@gmail.com>";
|
||||
obadz = "obadz <obadz-nixos@obadz.com>";
|
||||
ocharles = "Oliver Charles <ollie@ocharles.org.uk>";
|
||||
odi = "Oliver Dunkl <oliver.dunkl@gmail.com>";
|
||||
offline = "Jaka Hudoklin <jakahudoklin@gmail.com>";
|
||||
okasu = "Okasu <oka.sux@gmail.com>";
|
||||
olcai = "Erik Timan <dev@timan.info>";
|
||||
olejorgenb = "Ole Jørgen Brønner <olejorgenb@yahoo.no>";
|
||||
orbekk = "KJ Ørbekk <kjetil.orbekk@gmail.com>";
|
||||
orbitz = "Malcolm Matalka <mmatalka@gmail.com>";
|
||||
osener = "Ozan Sener <ozan@ozansener.com>";
|
||||
otwieracz = "Slawomir Gonet <slawek@otwiera.cz>";
|
||||
oxij = "Jan Malakhovski <oxij@oxij.org>";
|
||||
page = "Carles Pagès <page@cubata.homelinux.net>";
|
||||
paholg = "Paho Lurie-Gregg <paho@paholg.com>";
|
||||
pakhfn = "Fedor Pakhomov <pakhfn@gmail.com>";
|
||||
palo = "Ingolf Wanger <palipalo9@googlemail.com>";
|
||||
pashev = "Igor Pashev <pashev.igor@gmail.com>";
|
||||
pawelpacana = "Paweł Pacana <pawel.pacana@gmail.com>";
|
||||
periklis = "theopompos@gmail.com";
|
||||
pesterhazy = "Paulus Esterhazy <pesterhazy@gmail.com>";
|
||||
peterhoeg = "Peter Hoeg <peter@hoeg.com>";
|
||||
peti = "Peter Simons <simons@cryp.to>";
|
||||
philandstuff = "Philip Potter <philip.g.potter@gmail.com>";
|
||||
phile314 = "Philipp Hausmann <nix@314.ch>";
|
||||
Phlogistique = "Noé Rubinstein <noe.rubinstein@gmail.com>";
|
||||
phreedom = "Evgeny Egorochkin <phreedom@yandex.ru>";
|
||||
phunehehe = "Hoang Xuan Phu <phunehehe@gmail.com>";
|
||||
pierron = "Nicolas B. Pierron <nixos@nbp.name>";
|
||||
piotr = "Piotr Pietraszkiewicz <ppietrasa@gmail.com>";
|
||||
pjbarnoy = "Perry Barnoy <pjbarnoy@gmail.com>";
|
||||
pjones = "Peter Jones <pjones@devalot.com>";
|
||||
pkmx = "Chih-Mao Chen <pkmx.tw@gmail.com>";
|
||||
plcplc = "Philip Lykke Carlsen <plcplc@gmail.com>";
|
||||
pmahoney = "Patrick Mahoney <pat@polycrystal.org>";
|
||||
pmiddend = "Philipp Middendorf <pmidden@secure.mailbox.org>";
|
||||
prikhi = "Pavan Rikhi <pavan.rikhi@gmail.com>";
|
||||
primeos = "Michael Weiss <dev.primeos@gmail.com>";
|
||||
profpatsch = "Profpatsch <mail@profpatsch.de>";
|
||||
proglodyte = "Proglodyte <proglodyte23@gmail.com>";
|
||||
pshendry = "Paul Hendry <paul@pshendry.com>";
|
||||
psibi = "Sibi <sibi@psibi.in>";
|
||||
pstn = "Philipp Steinpaß <philipp@xndr.de>";
|
||||
pSub = "Pascal Wittmann <mail@pascal-wittmann.de>";
|
||||
puffnfresh = "Brian McKenna <brian@brianmckenna.org>";
|
||||
pxc = "Patrick Callahan <patrick.callahan@latitudeengineering.com>";
|
||||
qknight = "Joachim Schiele <js@lastlog.de>";
|
||||
ragge = "Ragnar Dahlen <r.dahlen@gmail.com>";
|
||||
ralith = "Benjamin Saunders <ben.e.saunders@gmail.com>";
|
||||
ramkromberg = "Ram Kromberg <ramkromberg@mail.com>";
|
||||
rardiol = "Ricardo Ardissone <ricardo.ardissone@gmail.com>";
|
||||
rasendubi = "Alexey Shmalko <rasen.dubi@gmail.com>";
|
||||
raskin = "Michael Raskin <7c6f434c@mail.ru>";
|
||||
rbasso = "Rafael Basso <rbasso@sharpgeeks.net>";
|
||||
redbaron = "Maxim Ivanov <ivanov.maxim@gmail.com>";
|
||||
redvers = "Redvers Davies <red@infect.me>";
|
||||
refnil = "Martin Lavoie <broemartino@gmail.com>";
|
||||
regnat = "Théophane Hufschmitt <regnat@regnat.ovh>";
|
||||
relrod = "Ricky Elrod <ricky@elrod.me>";
|
||||
renzo = "Renzo Carbonara <renzocarbonara@gmail.com>";
|
||||
retrry = "Tadas Barzdžius <retrry@gmail.com>";
|
||||
rick68 = "Wei-Ming Yang <rick68@gmail.com>";
|
||||
rickynils = "Rickard Nilsson <rickynils@gmail.com>";
|
||||
rlupton20 = "Richard Lupton <richard.lupton@gmail.com>";
|
||||
rnhmjoj = "Michele Guerini Rocco <micheleguerinirocco@me.com>";
|
||||
rob = "Rob Vermaas <rob.vermaas@gmail.com>";
|
||||
robberer = "Longrin Wischnewski <robberer@freakmail.de>";
|
||||
robbinch = "Robbin C. <robbinch33@gmail.com>";
|
||||
robgssp = "Rob Glossop <robgssp@gmail.com>";
|
||||
roblabla = "Robin Lambertz <robinlambertz+dev@gmail.com>";
|
||||
roconnor = "Russell O'Connor <roconnor@theorem.ca>";
|
||||
roelof = "Roelof Wobben <rwobben@hotmail.com>";
|
||||
romildo = "José Romildo Malaquias <malaquias@gmail.com>";
|
||||
ronny = "Ronny Pfannschmidt <nixos@ronnypfannschmidt.de>";
|
||||
rszibele = "Richard Szibele <richard_szibele@hotmail.com>";
|
||||
rushmorem = "Rushmore Mushambi <rushmore@webenchanter.com>";
|
||||
rvl = "Rodney Lorrimar <dev+nix@rodney.id.au>";
|
||||
rvlander = "Gaëtan André <rvlander@gaetanandre.eu>";
|
||||
ryanartecona = "Ryan Artecona <ryanartecona@gmail.com>";
|
||||
ryansydnor = "Ryan Sydnor <ryan.t.sydnor@gmail.com>";
|
||||
ryantm = "Ryan Mulligan <ryan@ryantm.com>";
|
||||
rycee = "Robert Helgesson <robert@rycee.net>";
|
||||
ryneeverett = "Ryne Everett <ryneeverett@gmail.com>";
|
||||
s1lvester = "Markus Silvester <s1lvester@bockhacker.me>";
|
||||
samuelrivas = "Samuel Rivas <samuelrivas@gmail.com>";
|
||||
sander = "Sander van der Burg <s.vanderburg@tudelft.nl>";
|
||||
schmitthenner = "Fabian Schmitthenner <development@schmitthenner.eu>";
|
||||
schneefux = "schneefux <schneefux+nixos_pkg@schneefux.xyz>";
|
||||
schristo = "Scott Christopher <schristopher@konputa.com>";
|
||||
scolobb = "Sergiu Ivanov <sivanov@colimite.fr>";
|
||||
sepi = "Raffael Mancini <raffael@mancini.lu>";
|
||||
seppeljordan = "Sebastian Jordan <sebastian.jordan.mail@googlemail.com>";
|
||||
sheenobu = "Sheena Artrip <sheena.artrip@gmail.com>";
|
||||
sheganinans = "Aistis Raulinaitis <sheganinans@gmail.com>";
|
||||
shell = "Shell Turner <cam.turn@gmail.com>";
|
||||
shlevy = "Shea Levy <shea@shealevy.com>";
|
||||
siddharthist = "Langston Barrett <langston.barrett@gmail.com>";
|
||||
simonvandel = "Simon Vandel Sillesen <simon.vandel@gmail.com>";
|
||||
sjagoe = "Simon Jagoe <simon@simonjagoe.com>";
|
||||
simons = "Peter Simons <simons@cryp.to>";
|
||||
sjmackenzie = "Stewart Mackenzie <setori88@gmail.com>";
|
||||
sjourdois = "Stéphane ‘kwisatz’ Jourdois <sjourdois@gmail.com>";
|
||||
skeidel = "Sven Keidel <svenkeidel@gmail.com>";
|
||||
skrzyp = "Jakub Skrzypnik <jot.skrzyp@gmail.com>";
|
||||
sleexyz = "Sean Lee <freshdried@gmail.com>";
|
||||
smironov = "Sergey Mironov <grrwlf@gmail.com>";
|
||||
solson = "Scott Olson <scott@solson.me>";
|
||||
spacefrogg = "Michael Raitza <spacefrogg-nixos@meterriblecrew.net>";
|
||||
spencerjanssen = "Spencer Janssen <spencerjanssen@gmail.com>";
|
||||
spinus = "Tomasz Czyż <tomasz.czyz@gmail.com>";
|
||||
smironov = "Sergey Mironov <ierton@gmail.com>";
|
||||
sprock = "Roger Mason <rmason@mun.ca>";
|
||||
spwhitt = "Spencer Whitt <sw@swhitt.me>";
|
||||
SShrike = "Severen Redwood <severen@shrike.me>";
|
||||
stephenmw = "Stephen Weinberg <stephen@q5comm.com>";
|
||||
sternenseemann = "Lukas Epple <post@lukasepple.de>";
|
||||
steveej = "Stefan Junker <mail@stefanjunker.de>";
|
||||
swarren83 = "Shawn Warren <shawn.w.warren@gmail.com>";
|
||||
swistak35 = "Rafał Łasocha <me@swistak35.com>";
|
||||
szczyp = "Szczyp <qb@szczyp.com>";
|
||||
sztupi = "Attila Sztupak <attila.sztupak@gmail.com>";
|
||||
taeer = "Taeer Bar-Yam <taeer@necsi.edu>";
|
||||
tailhook = "Paul Colomiets <paul@colomiets.name>";
|
||||
taktoa = "Remy Goldschmidt <taktoa@gmail.com>";
|
||||
tavyc = "Octavian Cerna <octavian.cerna@gmail.com>";
|
||||
teh = "Tom Hunger <tehunger@gmail.com>";
|
||||
telotortium = "Robert Irelan <rirelan@gmail.com>";
|
||||
thall = "Niclas Thall <niclas.thall@gmail.com>";
|
||||
thammers = "Tobias Hammerschmidt <jawr@gmx.de>";
|
||||
the-kenny = "Moritz Ulrich <moritz@tarn-vedra.de>";
|
||||
theuni = "Christian Theune <ct@flyingcircus.io>";
|
||||
thoughtpolice = "Austin Seipp <aseipp@pobox.com>";
|
||||
timbertson = "Tim Cuthbertson <tim@gfxmonk.net>";
|
||||
titanous = "Jonathan Rudenberg <jonathan@titanous.com>";
|
||||
tohl = "Tomas Hlavaty <tom@logand.com>";
|
||||
tokudan = "Daniel Frank <git@danielfrank.net>";
|
||||
tomberek = "Thomas Bereknyei <tomberek@gmail.com>";
|
||||
travisbhartwell = "Travis B. Hartwell <nafai@travishartwell.net>";
|
||||
trino = "Hubert Mühlhans <muehlhans.hubert@ekodia.de>";
|
||||
tstrobel = "Thomas Strobel <4ZKTUB6TEP74PYJOPWIR013S2AV29YUBW5F9ZH2F4D5UMJUJ6S@hash.domains>";
|
||||
tstrobel = "Thomas Strobel <ts468@cam.ac.uk>";
|
||||
ttuegel = "Thomas Tuegel <ttuegel@gmail.com>";
|
||||
tv = "Tomislav Viljetić <tv@shackspace.de>";
|
||||
tvestelind = "Tomas Vestelind <tomas.vestelind@fripost.org>";
|
||||
tvorog = "Marsel Zaripov <marszaripov@gmail.com>";
|
||||
twey = "James ‘Twey’ Kay <twey@twey.co.uk>";
|
||||
uralbash = "Svintsov Dmitry <root@uralbash.ru>";
|
||||
urkud = "Yury G. Kudryashov <urkud+nix@ya.ru>";
|
||||
uwap = "uwap <me@uwap.name>";
|
||||
vandenoever = "Jos van den Oever <jos@vandenoever.info>";
|
||||
vanzef = "Ivan Solyankin <vanzef@gmail.com>";
|
||||
vbgl = "Vincent Laporte <Vincent.Laporte@gmail.com>";
|
||||
vbmithr = "Vincent Bernardoff <vb@luminar.eu.org>";
|
||||
vcunat = "Vladimír Čunát <vcunat@gmail.com>";
|
||||
vdemeester = "Vincent Demeester <vincent@sbr.pm>";
|
||||
veprbl = "Dmitry Kalinkin <veprbl@gmail.com>";
|
||||
viric = "Lluís Batlle i Rossell <viric@viric.name>";
|
||||
vizanto = "Danny Wilson <danny@prime.vc>";
|
||||
vklquevs = "vklquevs <vklquevs@gmail.com>";
|
||||
vlstill = "Vladimír Štill <xstill@fi.muni.cz>";
|
||||
vmandela = "Venkateswara Rao Mandela <venkat.mandela@gmail.com>";
|
||||
volhovm = "Mikhail Volkhov <volhovm.cs@gmail.com>";
|
||||
volth = "Jaroslavas Pocepko <jaroslavas@volth.com>";
|
||||
vozz = "Oliver Hunt <oliver.huntuk@gmail.com>";
|
||||
vrthra = "Rahul Gopinath <rahul@gopinath.org>";
|
||||
wedens = "wedens <kirill.wedens@gmail.com>";
|
||||
willtim = "Tim Philip Williams <tim.williams.public@gmail.com>";
|
||||
winden = "Antonio Vargas Gonzalez <windenntw@gmail.com>";
|
||||
wizeman = "Ricardo M. Correia <rcorreia@wizy.org>";
|
||||
wjlroe = "William Roe <willroe@gmail.com>";
|
||||
wkennington = "William A. Kennington III <william@wkennington.com>";
|
||||
wmertens = "Wout Mertens <Wout.Mertens@gmail.com>";
|
||||
womfoo = "Kranium Gikos Mendoza <kranium@gikos.net>";
|
||||
wscott = "Wayne Scott <wsc9tt@gmail.com>";
|
||||
wyvie = "Elijah Rum <elijahrum@gmail.com>";
|
||||
yarr = "Dmitry V. <savraz@gmail.com>";
|
||||
yochai = "Yochai <yochai@titat.info>";
|
||||
yurrriq = "Eric Bailey <eric@ericb.me>";
|
||||
z77z = "Marco Maggesi <maggesi@math.unifi.it>";
|
||||
zagy = "Christian Zagrodnick <cz@flyingcircus.io>";
|
||||
zef = "Zef Hemel <zef@zef.me>";
|
||||
zimbatm = "zimbatm <zimbatm@zimbatm.com>";
|
||||
zohl = "Al Zohali <zohl@fmap.me>";
|
||||
zoomulator = "Kim Simmons <zoomulator@gmail.com>";
|
||||
Gonzih = "Max Gonzih <gonzih@gmail.com>";
|
||||
}

@@ -1,2 +0,0 @@
# Expose the minimum required version for evaluating Nixpkgs
"1.10"

426
lib/misc.nix
Normal file
@@ -0,0 +1,426 @@
let lib = import ./default.nix;
|
||||
inherit (builtins) isFunction head tail isList isAttrs isInt attrNames;
|
||||
|
||||
in
|
||||
|
||||
with import ./lists.nix;
|
||||
with import ./attrsets.nix;
|
||||
with import ./strings.nix;
|
||||
|
||||
rec {
|
||||
|
||||
# returns default if env var is not set
|
||||
maybeEnv = name: default:
|
||||
let value = builtins.getEnv name; in
|
||||
if value == "" then default else value;
|
||||
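# A minimal usage sketch (not from the original file; the variable name is only illustrative):
#   maybeEnv "NIX_REMOTE" "daemon"
#   => "daemon" when $NIX_REMOTE is unset or empty, otherwise the value of $NIX_REMOTE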
|
||||
defaultMergeArg = x : y: if builtins.isAttrs y then
|
||||
y
|
||||
else
|
||||
(y x);
|
||||
defaultMerge = x: y: x // (defaultMergeArg x y);
|
||||
foldArgs = merger: f: init: x:
|
||||
let arg=(merger init (defaultMergeArg init x));
|
||||
# now add the function with composed args already applied to the final attrs
|
||||
base = (setAttrMerge "passthru" {} (f arg)
|
||||
( z : z // rec {
|
||||
function = foldArgs merger f arg;
|
||||
args = (lib.attrByPath ["passthru" "args"] {} z) // x;
|
||||
} ));
|
||||
withStdOverrides = base // {
|
||||
override = base.passthru.function;
|
||||
deepOverride = a : (base.passthru.function ((lib.mapAttrs (lib.deepOverrider a) base.passthru.args) // a));
|
||||
} ;
|
||||
in
|
||||
withStdOverrides;
|
||||
|
||||
|
||||
# predecessors: proposed replacement for applyAndFun (which has a bug because it merges twice)
|
||||
# the naming "overridableDelayableArgs" tries to express that you can
|
||||
# - override attr values which have been supplied earlier
|
||||
# - use attr values before they have been supplied by accessing the fix point
|
||||
# name "fixed"
|
||||
# f: the (delayed overridden) arguments are applied to this
|
||||
#
|
||||
# initial: initial attrs arguments and settings. see defaultOverridableDelayableArgs
|
||||
#
|
||||
# returns: f applied to the arguments // special attributes attrs
|
||||
# a) merge: merge applied args with new args. Whether an argument is overridden depends on the merge settings
|
||||
# b) replace: this lets you replace and remove names no matter which merge function has been set
|
||||
#
|
||||
# examples: see test cases "res" below;
|
||||
overridableDelayableArgs =
|
||||
f : # the function applied to the arguments
|
||||
initial : # you pass attrs, the functions below are passing a function taking the fix argument
|
||||
let
|
||||
takeFixed = if isFunction initial then initial else (fixed : initial); # transform initial to an expression always taking the fixed argument
|
||||
tidy = args :
|
||||
let # apply all functions given in "applyPreTidy" in sequence
|
||||
applyPreTidyFun = fold ( n : a : x : n ( a x ) ) lib.id (maybeAttr "applyPreTidy" [] args);
|
||||
in removeAttrs (applyPreTidyFun args) ( ["applyPreTidy"] ++ (maybeAttr "removeAttrs" [] args) ); # tidy up args before applying them
|
||||
fun = n : x :
|
||||
let newArgs = fixed :
|
||||
let args = takeFixed fixed;
|
||||
mergeFun = args.${n};
|
||||
in if isAttrs x then (mergeFun args x)
|
||||
else assert isFunction x;
|
||||
mergeFun args (x ( args // { inherit fixed; }));
|
||||
in overridableDelayableArgs f newArgs;
|
||||
in
|
||||
(f (tidy (lib.fix takeFixed))) // {
|
||||
merge = fun "mergeFun";
|
||||
replace = fun "keepFun";
|
||||
};
|
||||
defaultOverridableDelayableArgs = f :
|
||||
let defaults = {
|
||||
mergeFun = mergeAttrByFunc; # default merge function. The merge strategy (concatenate lists, strings) is given by mergeAttrBy
|
||||
keepFun = a : b : { inherit (a) removeAttrs mergeFun keepFun mergeAttrBy; } // b; # even when using replace preserve these values
|
||||
applyPreTidy = []; # list of functions applied to args before args are tidied up (usage case : prepareDerivationArgs)
|
||||
mergeAttrBy = mergeAttrBy // {
|
||||
applyPreTidy = a : b : a ++ b;
|
||||
removeAttrs = a : b: a ++ b;
|
||||
};
|
||||
removeAttrs = ["mergeFun" "keepFun" "mergeAttrBy" "removeAttrs" "fixed" ]; # before applying the arguments to the function make sure these names are gone
|
||||
};
|
||||
in (overridableDelayableArgs f defaults).merge;
|
||||
|
||||
|
||||
|
||||
# rec { # an example of how composedArgsAndFun can be used
|
||||
# a = composedArgsAndFun (x : x) { a = ["2"]; meta = { d = "bar";}; };
|
||||
# # meta.d will be lost ! It's your task to preserve it (eg using a merge function)
|
||||
# b = a.passthru.function { a = [ "3" ]; meta = { d2 = "bar2";}; };
|
||||
# # instead of passing/ overriding values you can use a merge function:
|
||||
# c = b.passthru.function ( x: { a = x.a ++ ["4"]; }); # consider using (maybeAttr "a" [] x)
|
||||
# }
|
||||
# result:
|
||||
# {
|
||||
# a = { a = ["2"]; meta = { d = "bar"; }; passthru = { function = .. }; };
|
||||
# b = { a = ["3"]; meta = { d2 = "bar2"; }; passthru = { function = .. }; };
|
||||
# c = { a = ["3" "4"]; meta = { d2 = "bar2"; }; passthru = { function = .. }; };
|
||||
# # c2 is equal to c
|
||||
# }
|
||||
composedArgsAndFun = f: foldArgs defaultMerge f {};
|
||||
|
||||
|
||||
# shortcut for attrByPath ["name"] default attrs
|
||||
maybeAttrNullable = maybeAttr;
|
||||
|
||||
# shortcut for attrByPath ["name"] default attrs
|
||||
maybeAttr = name: default: attrs: attrs.${name} or default;
|
||||
|
||||
|
||||
# Return the second argument if the first one is true or the empty version
|
||||
# of the second argument.
|
||||
ifEnable = cond: val:
|
||||
if cond then val
|
||||
else if builtins.isList val then []
|
||||
else if builtins.isAttrs val then {}
|
||||
# else if builtins.isString val then ""
|
||||
else if val == true || val == false then false
|
||||
else null;
|
||||
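# A hedged sketch of the behaviour described above (expected results, not taken from a test suite):
#   ifEnable true  [ "--with-x" ]  => [ "--with-x" ]
#   ifEnable false [ "--with-x" ]  => [ ]
#   ifEnable false { a = 1; }      => { }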
|
||||
|
||||
# Return true only if there is an attribute and it is true.
|
||||
checkFlag = attrSet: name:
|
||||
if name == "true" then true else
|
||||
if name == "false" then false else
|
||||
if (elem name (attrByPath ["flags"] [] attrSet)) then true else
|
||||
attrByPath [name] false attrSet ;
|
||||
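# For illustration (the attribute names below are made up):
#   checkFlag { flags = [ "x11" ]; } "x11"  => true
#   checkFlag { gui = true; }        "gui"  => true
#   checkFlag { }                    "gui"  => false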
|
||||
|
||||
# Input : attrSet, [ [name default] ... ], name
|
||||
# Output : its value or default.
|
||||
getValue = attrSet: argList: name:
|
||||
( attrByPath [name] (if checkFlag attrSet name then true else
|
||||
if argList == [] then null else
|
||||
let x = builtins.head argList; in
|
||||
if (head x) == name then
|
||||
(head (tail x))
|
||||
else (getValue attrSet
|
||||
(tail argList) name)) attrSet );
|
||||
|
||||
|
||||
# Input : attrSet, [[name default] ...], [ [flagname reqs..] ... ]
|
||||
# Output : are reqs satisfied? It's asserted.
|
||||
checkReqs = attrSet : argList : condList :
|
||||
(
|
||||
fold lib.and true
|
||||
(map (x: let name = (head x) ; in
|
||||
|
||||
((checkFlag attrSet name) ->
|
||||
(fold lib.and true
|
||||
(map (y: let val=(getValue attrSet argList y); in
|
||||
(val!=null) && (val!=false))
|
||||
(tail x))))) condList)) ;
|
||||
|
||||
|
||||
# This function has O(n^2) performance.
|
||||
uniqList = {inputList, acc ? []} :
|
||||
let go = xs : acc :
|
||||
if xs == []
|
||||
then []
|
||||
else let x = head xs;
|
||||
y = if elem x acc then [] else [x];
|
||||
in y ++ go (tail xs) (y ++ acc);
|
||||
in go inputList acc;
|
||||
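# For example (quadratic, so only suitable for short lists):
#   uniqList { inputList = [ 1 2 1 3 2 ]; }  => [ 1 2 3 ]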
|
||||
uniqListExt = {inputList, outputList ? [],
|
||||
getter ? (x : x), compare ? (x: y: x==y)}:
|
||||
if inputList == [] then outputList else
|
||||
let x=head inputList;
|
||||
isX = y: (compare (getter y) (getter x));
|
||||
newOutputList = outputList ++
|
||||
(if any isX outputList then [] else [x]);
|
||||
in uniqListExt {outputList=newOutputList;
|
||||
inputList = (tail inputList);
|
||||
inherit getter compare;
|
||||
};
|
||||
|
||||
|
||||
|
||||
condConcat = name: list: checker:
|
||||
if list == [] then name else
|
||||
if checker (head list) then
|
||||
condConcat
|
||||
(name + (head (tail list)))
|
||||
(tail (tail list))
|
||||
checker
|
||||
else condConcat
|
||||
name (tail (tail list)) checker;
|
||||
|
||||
lazyGenericClosure = {startSet, operator}:
|
||||
let
|
||||
work = list: doneKeys: result:
|
||||
if list == [] then
|
||||
result
|
||||
else
|
||||
let x = head list; key = x.key; in
|
||||
if elem key doneKeys then
|
||||
work (tail list) doneKeys result
|
||||
else
|
||||
work (tail list ++ operator x) ([key] ++ doneKeys) ([x] ++ result);
|
||||
in
|
||||
work startSet [] [];
|
||||
|
||||
genericClosure = builtins.genericClosure or lazyGenericClosure;
|
||||
|
||||
innerModifySumArgs = f: x: a: b: if b == null then (f a b) // x else
|
||||
innerModifySumArgs f x (a // b);
|
||||
modifySumArgs = f: x: innerModifySumArgs f x {};
|
||||
|
||||
|
||||
innerClosePropagation = acc : xs :
|
||||
if xs == []
|
||||
then acc
|
||||
else let y = head xs;
|
||||
ys = tail xs;
|
||||
in if ! isAttrs y
|
||||
then innerClosePropagation acc ys
|
||||
else let acc' = [y] ++ acc;
|
||||
in innerClosePropagation
|
||||
acc'
|
||||
(uniqList { inputList = (maybeAttrNullable "propagatedBuildInputs" [] y)
|
||||
++ (maybeAttrNullable "propagatedNativeBuildInputs" [] y)
|
||||
++ ys;
|
||||
acc = acc';
|
||||
}
|
||||
);
|
||||
|
||||
closePropagation = list: (uniqList {inputList = (innerClosePropagation [] list);});
|
||||
|
||||
# calls a function (f attr value ) for each record item. returns a list
|
||||
mapAttrsFlatten = f : r : map (attr: f attr r.${attr}) (attrNames r);
|
||||
|
||||
# attribute set containing one attribute
|
||||
nvs = name : value : listToAttrs [ (nameValuePair name value) ];
|
||||
# adds / replaces an attribute of an attribute set
|
||||
setAttr = set : name : v : set // (nvs name v);
|
||||
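# For example:
#   nvs "a" 1                 => { a = 1; }
#   setAttr { a = 1; } "b" 2  => { a = 1; b = 2; }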
|
||||
# setAttrMerge (similar to mergeAttrsWithFunc but only merges the values of a particular name)
|
||||
# setAttrMerge "a" [] { a = [2];} (x : x ++ [3]) -> { a = [2 3]; }
|
||||
# setAttrMerge "a" [] { } (x : x ++ [3]) -> { a = [ 3]; }
|
||||
setAttrMerge = name : default : attrs : f :
|
||||
setAttr attrs name (f (maybeAttr name default attrs));
|
||||
|
||||
# Using f = a : b : b the result is similar to //
|
||||
# merge attributes with custom function handling the case that the attribute
|
||||
# exists in both sets
|
||||
mergeAttrsWithFunc = f : set1 : set2 :
|
||||
fold (n: set : if set ? ${n}
|
||||
then setAttr set n (f set.${n} set2.${n})
|
||||
else set )
|
||||
(set2 // set1) (attrNames set2);
|
||||
|
||||
# merges two attribute sets, concatenating the values of attributes with the same name
|
||||
# eg { a = 7; } { a = [ 2 3 ]; } becomes { a = [ 7 2 3 ]; }
|
||||
mergeAttrsConcatenateValues = mergeAttrsWithFunc ( a : b : (toList a) ++ (toList b) );
|
||||
|
||||
# merges attributes using //; if a name exists in both attribute sets
|
||||
# an error will be triggered unless it's listed in mergeLists
|
||||
# so you can mergeAttrsNoOverride { buildInputs = [a]; } { buildInputs = [a]; } {} to get
|
||||
# { buildInputs = [a b]; }
|
||||
# merging buildPhase doesn't really make sense. The cases where appending/prefixing would fit your needs will be rare;
|
||||
# in these cases the first buildPhase will override the second one
|
||||
# ! deprecated, use mergeAttrByFunc instead
|
||||
mergeAttrsNoOverride = { mergeLists ? ["buildInputs" "propagatedBuildInputs"],
|
||||
overrideSnd ? [ "buildPhase" ]
|
||||
} : attrs1 : attrs2 :
|
||||
fold (n: set :
|
||||
setAttr set n ( if set ? ${n}
|
||||
then # merge
|
||||
if elem n mergeLists # attribute contains list, merge them by concatenating
|
||||
then attrs2.${n} ++ attrs1.${n}
|
||||
else if elem n overrideSnd
|
||||
then attrs1.${n}
|
||||
else throw "error mergeAttrsNoOverride, attribute ${n} given in both attributes - no merge func defined"
|
||||
else attrs2.${n} # add attribute not existing in attr1
|
||||
)) attrs1 (attrNames attrs2);
|
||||
|
||||
|
||||
# example usage:
|
||||
# mergeAttrByFunc {
|
||||
# inherit mergeAttrBy; # defined below
|
||||
# buildInputs = [ a b ];
|
||||
# } {
|
||||
# buildInputs = [ c d ];
|
||||
# };
|
||||
# will result in
|
||||
# { mergeAttrBy = [...]; buildInputs = [ a b c d ]; }
|
||||
# is used by prepareDerivationArgs, defaultOverridableDelayableArgs and can be used when composing using
|
||||
# foldArgs, composedArgsAndFun or applyAndFun. Example: composableDerivation in all-packages.nix
|
||||
mergeAttrByFunc = x : y :
|
||||
let
|
||||
mergeAttrBy2 = { mergeAttrBy=lib.mergeAttrs; }
|
||||
// (maybeAttr "mergeAttrBy" {} x)
|
||||
// (maybeAttr "mergeAttrBy" {} y); in
|
||||
fold lib.mergeAttrs {} [
|
||||
x y
|
||||
(mapAttrs ( a : v : # merge special names using given functions
|
||||
if x ? ${a}
|
||||
then if y ? ${a}
|
||||
then v x.${a} y.${a} # both have attr, use merge func
|
||||
else x.${a} # only x has attr
|
||||
else y.${a} # only y has attr)
|
||||
) (removeAttrs mergeAttrBy2
|
||||
# don't merge attrs which are neither in x nor y
|
||||
(filter (a: ! x ? ${a} && ! y ? ${a})
|
||||
(attrNames mergeAttrBy2))
|
||||
)
|
||||
)
|
||||
];
|
||||
mergeAttrsByFuncDefaults = foldl mergeAttrByFunc { inherit mergeAttrBy; };
|
||||
mergeAttrsByFuncDefaultsClean = list: removeAttrs (mergeAttrsByFuncDefaults list) ["mergeAttrBy"];
|
||||
|
||||
# merge attrs based on version key into mkDerivation args, see mergeAttrBy to learn about smart merge defaults
|
||||
#
|
||||
# This function is best explained by an example:
|
||||
#
|
||||
# {version ? "2.x"} :
|
||||
#
|
||||
# mkDerivation (mergeAttrsByVersion "package-name" version
|
||||
# { # version specific settings
|
||||
# "git" = { src = ..; preConfigre = "autogen.sh"; buildInputs = [automake autoconf libtool]; };
|
||||
# "2.x" = { src = ..; };
|
||||
# }
|
||||
# { // shared settings
|
||||
# buildInputs = [ common build inputs ];
|
||||
# meta = { .. }
|
||||
# }
|
||||
# )
|
||||
#
|
||||
# Please note that e.g. Eelco Dolstra usually prefers having one file for
|
||||
# each version. On the other hand there are valuable additional design goals
|
||||
# - readability
|
||||
# - do it once only
|
||||
# - try to avoid duplication
|
||||
#
|
||||
# Marc Weber and Michael Raskin sometimes prefer keeping older
|
||||
# versions around for testing and regression tests - as long as it's cheap to
|
||||
# do so.
|
||||
#
|
||||
# Very often it just happens that the "shared" code is the bigger part.
|
||||
# Then using this function might be appropriate.
|
||||
#
|
||||
# Be aware that it's easy to cause recompilations in all versions when using
|
||||
# this function - also if derivations get too complex splitting into multiple
|
||||
# files is the way to go.
|
||||
#
|
||||
# See misc.nix -> versionedDerivation
|
||||
# discussion: nixpkgs: pull/310
|
||||
mergeAttrsByVersion = name: version: attrsByVersion: base:
|
||||
mergeAttrsByFuncDefaultsClean [ { name = "${name}-${version}"; } base (maybeAttr version (throw "bad version ${version} for ${name}") attrsByVersion)];
|
||||
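# A concrete sketch of the call above (the attribute values are made up for illustration):
#   mergeAttrsByVersion "hello" "2.x"
#     { git   = { configureFlags = [ "--enable-git" ]; };
#       "2.x" = { configureFlags = [ "--enable-2x"  ]; }; }
#     { configureFlags = [ "--shared" ]; }
#   => { name = "hello-2.x"; configureFlags = [ "--shared" "--enable-2x" ]; }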
|
||||
# sane defaults (same name as attr name so that inherit can be used)
|
||||
mergeAttrBy = # { buildInputs = concatList; [...]; passthru = mergeAttr; [..]; }
|
||||
listToAttrs (map (n : nameValuePair n lib.concat)
|
||||
[ "nativeBuildInputs" "buildInputs" "propagatedBuildInputs" "configureFlags" "prePhases" "postAll" "patches" ])
|
||||
// listToAttrs (map (n : nameValuePair n lib.mergeAttrs) [ "passthru" "meta" "cfg" "flags" ])
|
||||
// listToAttrs (map (n : nameValuePair n (a: b: "${a}\n${b}") ) [ "preConfigure" "postInstall" ])
|
||||
;
|
||||
|
||||
# prepareDerivationArgs tries to make writing configurable derivations easier
|
||||
# example:
|
||||
# prepareDerivationArgs {
|
||||
# mergeAttrBy = {
|
||||
# myScript = x : y : x ++ "\n" ++ y;
|
||||
# };
|
||||
# cfg = {
|
||||
# readlineSupport = true;
|
||||
# };
|
||||
# flags = {
|
||||
# readline = {
|
||||
# set = {
|
||||
# configureFlags = [ "--with-compiler=${compiler}" ];
|
||||
# buildInputs = [ compiler ];
|
||||
# pass = { inherit compiler; READLINE=1; };
|
||||
# assertion = compiler.dllSupport;
|
||||
# myScript = "foo";
|
||||
# };
|
||||
# unset = { configureFlags = ["--without-compiler"]; };
|
||||
# };
|
||||
# };
|
||||
# src = ...
|
||||
# buildPhase = '' ... '';
|
||||
# name = ...
|
||||
# myScript = "bar";
|
||||
# };
|
||||
# if you don't have need for unset you can omit the surrounding set = { .. } attr
|
||||
# all attrs except flags cfg and mergeAttrBy will be merged with the
|
||||
# additional data from flags depending on config settings
|
||||
# It's used in composableDerivation in all-packages.nix. It's also used
|
||||
# heavily in the new python and libs implementation
|
||||
#
|
||||
# should we check for misspelled cfg options?
|
||||
# TODO use args.mergeFun here as well?
|
||||
prepareDerivationArgs = args:
|
||||
let args2 = { cfg = {}; flags = {}; } // args;
|
||||
flagName = name : "${name}Support";
|
||||
cfgWithDefaults = (listToAttrs (map (n : nameValuePair (flagName n) false) (attrNames args2.flags)))
|
||||
// args2.cfg;
|
||||
opts = attrValues (mapAttrs (a : v :
|
||||
let v2 = if v ? set || v ? unset then v else { set = v; };
|
||||
n = if cfgWithDefaults.${flagName a} then "set" else "unset";
|
||||
attr = maybeAttr n {} v2; in
|
||||
if (maybeAttr "assertion" true attr)
|
||||
then attr
|
||||
else throw "assertion of flag ${a} of derivation ${args.name} failed"
|
||||
) args2.flags );
|
||||
in removeAttrs
|
||||
(mergeAttrsByFuncDefaults ([args] ++ opts ++ [{ passthru = cfgWithDefaults; }]))
|
||||
["flags" "cfg" "mergeAttrBy" ];
|
||||
|
||||
|
||||
nixType = x:
|
||||
if isAttrs x then
|
||||
if x ? outPath then "derivation"
|
||||
else "aattrs"
|
||||
else if isFunction x then "function"
|
||||
else if isList x then "list"
|
||||
else if x == true then "bool"
|
||||
else if x == false then "bool"
|
||||
else if x == null then "null"
|
||||
else if isInt x then "int"
|
||||
else "string";
|
||||
|
||||
}

449
lib/modules.nix
@@ -1,5 +1,4 @@
|
||||
with import ./lists.nix;
|
||||
with import ./strings.nix;
|
||||
with import ./trivial.nix;
|
||||
with import ./attrsets.nix;
|
||||
with import ./options.nix;
|
||||
@@ -10,75 +9,27 @@ rec {
|
||||
|
||||
/* Evaluate a set of modules. The result is a set of two
|
||||
attributes: ‘options’: the nested set of all option declarations,
|
||||
and ‘config’: the nested set of all option values.
|
||||
!!! Please think twice before adding to this argument list! The more
|
||||
that is specified here instead of in the modules themselves the harder
|
||||
it is to transparently move a set of modules to be a submodule of another
|
||||
config (as the proper arguments need to be replicated at each call to
|
||||
evalModules) and the less declarative the module set is. */
|
||||
evalModules = { modules
|
||||
, prefix ? []
|
||||
, # This should only be used for special arguments that need to be evaluated
|
||||
# when resolving module structure (like in imports). For everything else,
|
||||
# there's _module.args.
|
||||
specialArgs ? {}
|
||||
, # This will be removed in the future; prefer the _module.args option instead.
|
||||
args ? {}
|
||||
, # This will be removed in the future; prefer the _module.check option instead.
|
||||
check ? true
|
||||
}:
|
||||
and ‘config’: the nested set of all option values. */
|
||||
evalModules = { modules, prefix ? [], args ? {}, check ? true }:
|
||||
let
|
||||
# This internal module declare internal options under the `_module'
|
||||
# attribute. These options are fragile, as they are used by the
|
||||
# module system to change the interpretation of modules.
|
||||
internalModule = rec {
|
||||
_file = ./modules.nix;
|
||||
|
||||
key = _file;
|
||||
|
||||
options = {
|
||||
_module.args = mkOption {
|
||||
type = types.attrsOf types.unspecified;
|
||||
internal = true;
|
||||
description = "Arguments passed to each module.";
|
||||
};
|
||||
|
||||
_module.check = mkOption {
|
||||
type = types.bool;
|
||||
internal = true;
|
||||
default = check;
|
||||
description = "Whether to check whether all option definitions have matching declarations.";
|
||||
};
|
||||
};
|
||||
|
||||
config = {
|
||||
_module.args = args;
|
||||
};
|
||||
};
|
||||
|
||||
closed = closeModules (modules ++ [ internalModule ]) ({ inherit config options; lib = import ./.; } // specialArgs);
|
||||
|
||||
args' = args // { lib = import ./.; } // result;
|
||||
closed = closeModules modules args';
|
||||
# Note: the list of modules is reversed to maintain backward
|
||||
# compatibility with the old module system. Not sure if this is
|
||||
# the most sensible policy.
|
||||
options = mergeModules prefix (reverseList closed);
|
||||
|
||||
# Traverse options and extract the option values into the final
|
||||
# config set. At the same time, check whether all option
|
||||
# definitions have matching declarations.
|
||||
# !!! _module.check's value can't depend on any other config values
|
||||
# without an infinite recursion. One way around this is to make the
|
||||
# 'config' passed around to the modules be unconditionally unchecked,
|
||||
# and only do the check in 'result'.
|
||||
config = yieldConfig prefix options;
|
||||
yieldConfig = prefix: set:
|
||||
let res = removeAttrs (mapAttrs (n: v:
|
||||
if isOption v then v.value
|
||||
else yieldConfig (prefix ++ [n]) v) set) ["_definedNames"];
|
||||
in
|
||||
if options._module.check.value && set ? _definedNames then
|
||||
foldl' (res: m:
|
||||
foldl' (res: name:
|
||||
if check && set ? _definedNames then
|
||||
fold (m: res:
|
||||
fold (name: res:
|
||||
if set ? ${name} then res else throw "The option `${showOption (prefix ++ [name])}' defined in `${m.file}' does not exist.")
|
||||
res m.names)
|
||||
res set._definedNames
|
||||
@@ -92,11 +43,9 @@ rec {
|
||||
let
|
||||
toClosureList = file: parentKey: imap (n: x:
|
||||
if isAttrs x || isFunction x then
|
||||
let key = "${parentKey}:anon-${toString n}"; in
|
||||
unifyModuleSyntax file key (unpackSubmodule (applyIfFunction key) x args)
|
||||
unifyModuleSyntax file "${parentKey}:anon-${toString n}" (applyIfFunction x args)
|
||||
else
|
||||
let file = toString x; key = toString x; in
|
||||
unifyModuleSyntax file key (applyIfFunction key (import x) args));
|
||||
unifyModuleSyntax (toString x) (toString x) (applyIfFunction (import x) args));
|
||||
in
|
||||
builtins.genericClosure {
|
||||
startSet = toClosureList unknownModule "" modules;
|
||||
@@ -106,12 +55,8 @@ rec {
|
||||
/* Massage a module into canonical form, that is, a set consisting
|
||||
of ‘options’, ‘config’ and ‘imports’ attributes. */
|
||||
unifyModuleSyntax = file: key: m:
|
||||
let metaSet = if m ? meta
|
||||
then { meta = m.meta; }
|
||||
else {};
|
||||
in
|
||||
if m ? config || m ? options then
|
||||
let badAttrs = removeAttrs m ["imports" "options" "config" "key" "_file" "meta"]; in
|
||||
let badAttrs = removeAttrs m ["imports" "options" "config" "key" "_file"]; in
|
||||
if badAttrs != {} then
|
||||
throw "Module `${key}' has an unsupported attribute `${head (attrNames badAttrs)}'. This is caused by assignments to the top-level attributes `config' or `options'."
|
||||
else
|
||||
@@ -119,56 +64,17 @@ rec {
|
||||
key = toString m.key or key;
|
||||
imports = m.imports or [];
|
||||
options = m.options or {};
|
||||
config = mkMerge [ (m.config or {}) metaSet ];
|
||||
config = m.config or {};
|
||||
}
|
||||
else
|
||||
{ file = m._file or file;
|
||||
key = toString m.key or key;
|
||||
imports = m.require or [] ++ m.imports or [];
|
||||
options = {};
|
||||
config = mkMerge [ (removeAttrs m ["key" "_file" "require" "imports"]) metaSet ];
|
||||
config = removeAttrs m ["key" "_file" "require" "imports"];
|
||||
};
|
||||
|
||||
applyIfFunction = key: f: args@{ config, options, lib, ... }: if isFunction f then
|
||||
let
|
||||
# Module arguments are resolved in a strict manner when attribute set
|
||||
# deconstruction is used. As the arguments are now defined with the
|
||||
# config._module.args option, the strictness used on the attribute
|
||||
# set argument would cause an infinite loop, if the result of the
|
||||
# option is given as argument.
|
||||
#
|
||||
# To work-around the strictness issue on the deconstruction of the
|
||||
# attributes set argument, we create a new attribute set which is
|
||||
# constructed to satisfy the expected set of attributes. Thus calling
|
||||
# a module will resolve strictly the attributes used as argument but
|
||||
# not their values. The values are forwarding the result of the
|
||||
# evaluation of the option.
|
||||
requiredArgs = builtins.attrNames (builtins.functionArgs f);
|
||||
context = name: ''while evaluating the module argument `${name}' in "${key}":'';
|
||||
extraArgs = builtins.listToAttrs (map (name: {
|
||||
inherit name;
|
||||
value = addErrorContext (context name)
|
||||
(args.${name} or config._module.args.${name});
|
||||
}) requiredArgs);
|
||||
|
||||
# Note: we append in the opposite order such that we can add an error
|
||||
# context on the explicited arguments of "args" too. This update
|
||||
# operator is used to make the "args@{ ... }: with args.lib;" notation
|
||||
# works.
|
||||
in f (args // extraArgs)
|
||||
else
|
||||
f;
|
||||
|
||||
/* We have to pack and unpack submodules. We cannot wrap the expected
|
||||
result of the function as we would no longer be able to list the arguments
|
||||
of the submodule. (see applyIfFunction) */
|
||||
unpackSubmodule = unpack: m: args:
|
||||
if isType "submodule" m then
|
||||
{ _file = m.file; } // (unpack m.submodule args)
|
||||
else unpack m args;
|
||||
|
||||
packSubmodule = file: m:
|
||||
{ _type = "submodule"; file = file; submodule = m; };
|
||||
applyIfFunction = f: arg: if isFunction f then f arg else f;
|
||||
|
||||
/* Merge a list of modules. This will recurse over the option
|
||||
declarations in all modules, combining them into a single set.
|
||||
@@ -187,22 +93,25 @@ rec {
|
||||
let
|
||||
loc = prefix ++ [name];
|
||||
# Get all submodules that declare ‘name’.
|
||||
decls = concatMap (m:
|
||||
decls = concatLists (map (m:
|
||||
if m.options ? ${name}
|
||||
then [ { inherit (m) file; options = m.options.${name}; } ]
|
||||
else []
|
||||
) options;
|
||||
) options);
|
||||
# Get all submodules that define ‘name’.
|
||||
defns = concatMap (m:
|
||||
defns = concatLists (map (m:
|
||||
if m.config ? ${name}
|
||||
then map (config: { inherit (m) file; inherit config; })
|
||||
(pushDownProperties m.config.${name})
|
||||
else []
|
||||
) configs;
|
||||
) configs);
|
||||
nrOptions = count (m: isOption m.options) decls;
|
||||
# Extract the definitions for this loc
|
||||
defns' = map (m: { inherit (m) file; value = m.config.${name}; })
|
||||
(filter (m: m.config ? ${name}) configs);
|
||||
# Process mkMerge and mkIf properties.
|
||||
defns' = concatMap (m:
|
||||
if m.config ? ${name}
|
||||
then map (m': { inherit (m) file; value = m'; }) (dischargeProperties m.config.${name})
|
||||
else []
|
||||
) configs;
|
||||
in
|
||||
if nrOptions == length decls then
|
||||
let opt = fixupOptionType loc (mergeOptionDecls loc decls);
|
||||
@@ -230,21 +139,13 @@ rec {
|
||||
'opts' is a list of modules. Each module has an options attribute which
|
||||
correspond to the definition of 'loc' in 'opt.file'. */
|
||||
mergeOptionDecls = loc: opts:
|
||||
foldl' (res: opt:
|
||||
let t = res.type;
|
||||
t' = opt.options.type;
|
||||
mergedType = t.typeMerge t'.functor;
|
||||
typesMergeable = mergedType != null;
|
||||
typeSet = if (bothHave "type") && typesMergeable
|
||||
then { type = mergedType; }
|
||||
else {};
|
||||
bothHave = k: opt.options ? ${k} && res ? ${k};
|
||||
in
|
||||
if bothHave "default" ||
|
||||
bothHave "example" ||
|
||||
bothHave "description" ||
|
||||
bothHave "apply" ||
|
||||
(bothHave "type" && (! typesMergeable))
|
||||
fold (opt: res:
|
||||
if opt.options ? default && res ? default ||
|
||||
opt.options ? example && res ? example ||
|
||||
opt.options ? description && res ? description ||
|
||||
opt.options ? apply && res ? apply ||
|
||||
# Accept to merge options which have identical types.
|
||||
opt.options ? type && res ? type && opt.options.type.name != res.type.name
|
||||
then
|
||||
throw "The option `${showOption loc}' in `${opt.file}' is already declared in ${showFiles res.declarations}."
|
||||
else
|
||||
@@ -255,87 +156,59 @@ rec {
|
||||
current option declaration as the file use for the submodule. If the
|
||||
submodule defines any filename, then we ignore the enclosing option file. */
|
||||
options' = toList opt.options.options;
|
||||
addModuleFile = m:
|
||||
if isFunction m then args: { _file = opt.file; } // (m args)
|
||||
else { _file = opt.file; } // m;
|
||||
coerceOption = file: opt:
|
||||
if isFunction opt then packSubmodule file opt
|
||||
else packSubmodule file { options = opt; };
|
||||
if isFunction opt then args: { _file = file; } // (opt args)
|
||||
else { _file = file; options = opt; };
|
||||
getSubModules = opt.options.type.getSubModules or null;
|
||||
submodules =
|
||||
if getSubModules != null then map (packSubmodule opt.file) getSubModules ++ res.options
|
||||
if getSubModules != null then map addModuleFile getSubModules ++ res.options
|
||||
else if opt.options ? options then map (coerceOption opt.file) options' ++ res.options
|
||||
else res.options;
|
||||
in opt.options // res //
|
||||
{ declarations = res.declarations ++ [opt.file];
|
||||
{ declarations = [opt.file] ++ res.declarations;
|
||||
options = submodules;
|
||||
} // typeSet
|
||||
}
|
||||
) { inherit loc; declarations = []; options = []; } opts;
|
||||
|
||||
/* Merge all the definitions of an option to produce the final
|
||||
config value. */
|
||||
evalOptionValue = loc: opt: defs:
|
||||
let
|
||||
# Add in the default value for this option, if any.
|
||||
defs' =
|
||||
(optional (opt ? default)
|
||||
{ file = head opt.declarations; value = mkOptionDefault opt.default; }) ++ defs;
|
||||
|
||||
# Handle properties, check types, and merge everything together.
|
||||
res =
|
||||
if opt.readOnly or false && length defs' > 1 then
|
||||
throw "The option `${showOption loc}' is read-only, but it's set multiple times."
|
||||
else
|
||||
mergeDefinitions loc opt.type defs';
|
||||
|
||||
# Check whether the option is defined, and apply the ‘apply’
|
||||
# function to the merged value. This allows options to yield a
|
||||
# value computed from the definitions.
|
||||
value =
|
||||
if !res.isDefined then
|
||||
# Process mkOverride properties, adding in the default
|
||||
# value specified in the option declaration (if any).
|
||||
defsFinal' = filterOverrides
|
||||
((if opt ? default then [{ file = head opt.declarations; value = mkOptionDefault opt.default; }] else []) ++ defs);
|
||||
# Sort mkOrder properties.
|
||||
defsFinal =
|
||||
# Avoid sorting if we don't have to.
|
||||
if any (def: def.value._type or "" == "order") defsFinal'
|
||||
then sortProperties defsFinal'
|
||||
else defsFinal';
|
||||
files = map (def: def.file) defsFinal;
|
||||
# Type-check the remaining definitions, and merge them if
|
||||
# possible.
|
||||
merged =
|
||||
if defsFinal == [] then
|
||||
throw "The option `${showOption loc}' is used but not defined."
|
||||
else if opt ? apply then
|
||||
opt.apply res.mergedValue
|
||||
else
|
||||
res.mergedValue;
|
||||
|
||||
fold (def: res:
|
||||
if opt.type.check def.value then res
|
||||
else throw "The option value `${showOption loc}' in `${def.file}' is not a ${opt.type.name}.")
|
||||
(opt.type.merge loc defsFinal) defsFinal;
|
||||
# Finally, apply the ‘apply’ function to the merged
|
||||
# value. This allows options to yield a value computed
|
||||
# from the definitions.
|
||||
value = (opt.apply or id) merged;
|
||||
in opt //
|
||||
{ value = addErrorContext "while evaluating the option `${showOption loc}':" value;
|
||||
definitions = map (def: def.value) res.defsFinal;
|
||||
files = map (def: def.file) res.defsFinal;
|
||||
inherit (res) isDefined;
|
||||
definitions = map (def: def.value) defsFinal;
|
||||
isDefined = defsFinal != [];
|
||||
inherit files;
|
||||
};
|
||||
|
||||
# Merge definitions of a value of a given type.
|
||||
mergeDefinitions = loc: type: defs: rec {
|
||||
defsFinal =
|
||||
let
|
||||
# Process mkMerge and mkIf properties.
|
||||
defs' = concatMap (m:
|
||||
map (value: { inherit (m) file; inherit value; }) (dischargeProperties m.value)
|
||||
) defs;
|
||||
|
||||
# Process mkOverride properties.
|
||||
defs'' = filterOverrides defs';
|
||||
|
||||
# Sort mkOrder properties.
|
||||
defs''' =
|
||||
# Avoid sorting if we don't have to.
|
||||
if any (def: def.value._type or "" == "order") defs''
|
||||
then sortProperties defs''
|
||||
else defs'';
|
||||
in defs''';
|
||||
|
||||
# Type-check the remaining definitions, and merge them.
|
||||
mergedValue = foldl' (res: def:
|
||||
if type.check def.value then res
|
||||
else throw "The option value `${showOption loc}' in `${def.file}' is not a ${type.name}.")
|
||||
(type.merge loc defsFinal) defsFinal;
|
||||
|
||||
isDefined = defsFinal != [];
|
||||
|
||||
optionalValue =
|
||||
if isDefined then { value = mergedValue; }
|
||||
else {};
|
||||
};
|
||||
|
||||
/* Given a config set, expand mkMerge properties, and push down the
|
||||
other properties into the children. The result is a list of
|
||||
config sets that do not have properties at top-level. For
|
||||
@@ -375,13 +248,10 @@ rec {
|
||||
if def._type or "" == "merge" then
|
||||
concatMap dischargeProperties def.contents
|
||||
else if def._type or "" == "if" then
|
||||
if isBool def.condition then
|
||||
if def.condition then
|
||||
dischargeProperties def.content
|
||||
else
|
||||
[ ]
|
||||
if def.condition then
|
||||
dischargeProperties def.content
|
||||
else
|
||||
throw "‘mkIf’ called with a non-Boolean condition"
|
||||
[ ]
|
||||
else
|
||||
[ def ];
|
||||
|
||||
@@ -408,7 +278,8 @@ rec {
|
||||
let
|
||||
defaultPrio = 100;
|
||||
getPrio = def: if def.value._type or "" == "override" then def.value.priority else defaultPrio;
|
||||
highestPrio = foldl' (prio: def: min (getPrio def) prio) 9999 defs;
|
||||
min = x: y: if x < y then x else y;
|
||||
highestPrio = fold (def: prio: min (getPrio def) prio) 9999 defs;
|
||||
strip = def: if def.value._type or "" == "override" then def // { value = def.value.content; } else def;
|
||||
in concatMap (def: if getPrio def == highestPrio then [(strip def)] else []) defs;
|
||||
|
||||
@@ -433,14 +304,12 @@ rec {
|
||||
options = opt.options or
|
||||
(throw "Option `${showOption loc'}' has type optionSet but has no option attribute, in ${showFiles opt.declarations}.");
|
||||
f = tp:
|
||||
let optionSetIn = type: (tp.name == type) && (tp.functor.wrapped.name == "optionSet");
|
||||
in
|
||||
if tp.name == "option set" || tp.name == "submodule" then
|
||||
throw "The option ${showOption loc} uses submodules without a wrapping type, in ${showFiles opt.declarations}."
|
||||
else if optionSetIn "attrsOf" then types.attrsOf (types.submodule options)
|
||||
else if optionSetIn "loaOf" then types.loaOf (types.submodule options)
|
||||
else if optionSetIn "listOf" then types.listOf (types.submodule options)
|
||||
else if optionSetIn "nullOr" then types.nullOr (types.submodule options)
|
||||
else if tp.name == "attribute set of option sets" then types.attrsOf (types.submodule options)
|
||||
else if tp.name == "list or attribute set of option sets" then types.loaOf (types.submodule options)
|
||||
else if tp.name == "list of option sets" then types.listOf (types.submodule options)
|
||||
else if tp.name == "null or option set" then types.nullOr (types.submodule options)
|
||||
else tp;
|
||||
in
|
||||
if opt.type.getSubModules or null == null
|
||||
@@ -488,179 +357,7 @@ rec {
|
||||
mkAfter = mkOrder 1500;
|
||||
|
||||
|
||||
# Convenient property used to transfer all definitions and their
|
||||
# properties from one option to another. This property is useful for
|
||||
# renaming options, and also for including properties from another module
|
||||
# system, including sub-modules.
|
||||
#
|
||||
# { config, options, ... }:
|
||||
#
|
||||
# {
|
||||
# # 'bar' might not always be defined in the current module-set.
|
||||
# config.foo.enable = mkAliasDefinitions (options.bar.enable or {});
|
||||
#
|
||||
# # 'barbaz' has to be defined in the current module-set.
|
||||
# config.foobar.paths = mkAliasDefinitions options.barbaz.paths;
|
||||
# }
|
||||
#
|
||||
# Note, this is different than taking the value of the option and using it
|
||||
# as a definition, as the new definition will not keep the mkOverride /
|
||||
# mkDefault properties of the previous option.
|
||||
#
|
||||
mkAliasDefinitions = mkAliasAndWrapDefinitions id;
|
||||
mkAliasAndWrapDefinitions = wrap: option:
|
||||
mkMerge
|
||||
(optional (isOption option && option.isDefined)
|
||||
(wrap (mkMerge option.definitions)));
|
||||
|
||||
|
||||
/* Compatibility. */
|
||||
fixMergeModules = modules: args: evalModules { inherit modules args; check = false; };
|
||||
|
||||
|
||||
/* Return a module that causes a warning to be shown if the
|
||||
specified option is defined. For example,
|
||||
|
||||
mkRemovedOptionModule [ "boot" "loader" "grub" "bootDevice" ] "<replacement instructions>"
|
||||
|
||||
causes a warning if the user defines boot.loader.grub.bootDevice.
|
||||
|
||||
replacementInstructions is a string that provides instructions on
|
||||
how to achieve the same functionality without the removed option,
|
||||
or alternatively a reasoning why the functionality is not needed.
|
||||
replacementInstructions SHOULD be provided!
|
||||
*/
|
||||
mkRemovedOptionModule = optionName: replacementInstructions:
|
||||
{ options, ... }:
|
||||
{ options = setAttrByPath optionName (mkOption {
|
||||
visible = false;
|
||||
});
|
||||
config.warnings =
|
||||
let opt = getAttrFromPath optionName options; in
|
||||
optional opt.isDefined ''
|
||||
The option definition `${showOption optionName}' in ${showFiles opt.files} no longer has any effect; please remove it.
|
||||
${replacementInstructions}'';
|
||||
};
|
||||
|
||||
/* Return a module that causes a warning to be shown if the
|
||||
specified "from" option is defined; the defined value is however
|
||||
forwarded to the "to" option. This can be used to rename options
|
||||
while providing backward compatibility. For example,
|
||||
|
||||
mkRenamedOptionModule [ "boot" "copyKernels" ] [ "boot" "loader" "grub" "copyKernels" ]
|
||||
|
||||
forwards any definitions of boot.copyKernels to
|
||||
boot.loader.grub.copyKernels while printing a warning.
|
||||
*/
|
||||
mkRenamedOptionModule = from: to: doRename {
|
||||
inherit from to;
|
||||
visible = false;
|
||||
warn = true;
|
||||
use = builtins.trace "Obsolete option `${showOption from}' is used. It was renamed to `${showOption to}'.";
|
||||
};
|
||||
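# A hedged usage sketch inside a NixOS module (the option paths are illustrative only):
#   imports = [
#     (mkRenamedOptionModule [ "services" "foo" "enable" ] [ "services" "bar" "enable" ])
#   ];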
|
||||
/* Return a module that causes a warning to be shown if any of the "from"
|
||||
option is defined; the defined values can be used in the "mergeFn" to set
|
||||
the "to" value.
|
||||
This function can be used to merge multiple options into one that has a
|
||||
different type.
|
||||
|
||||
"mergeFn" takes the module "config" as a parameter and must return a value
|
||||
of "to" option type.
|
||||
|
||||
mkMergedOptionModule
|
||||
[ [ "a" "b" "c" ]
|
||||
[ "d" "e" "f" ] ]
|
||||
[ "x" "y" "z" ]
|
||||
(config:
|
||||
let value = p: getAttrFromPath p config;
|
||||
in
|
||||
if (value [ "a" "b" "c" ]) == true then "foo"
|
||||
else if (value [ "d" "e" "f" ]) == true then "bar"
|
||||
else "baz")
|
||||
|
||||
- options.a.b.c is a removed boolean option
|
||||
- options.d.e.f is a removed boolean option
|
||||
- options.x.y.z is a new str option that combines a.b.c and d.e.f
|
||||
functionality
|
||||
|
||||
This shows a warning if any of a.b.c or d.e.f is set, and sets the value of
|
||||
x.y.z to the result of the merge function
|
||||
*/
|
||||
mkMergedOptionModule = from: to: mergeFn:
|
||||
{ config, options, ... }:
|
||||
{
|
||||
options = foldl recursiveUpdate {} (map (path: setAttrByPath path (mkOption {
|
||||
visible = false;
|
||||
# To use the value in mergeFn without triggering errors
|
||||
default = "_mkMergedOptionModule";
|
||||
})) from);
|
||||
|
||||
config = {
|
||||
warnings = filter (x: x != "") (map (f:
|
||||
let val = getAttrFromPath f config;
|
||||
opt = getAttrFromPath f options;
|
||||
in
|
||||
optionalString
|
||||
(val != "_mkMergedOptionModule")
|
||||
"The option `${showOption f}' defined in ${showFiles opt.files} has been changed to `${showOption to}' that has a different type. Please read `${showOption to}' documentation and update your configuration accordingly."
|
||||
) from);
|
||||
} // setAttrByPath to (mkMerge
|
||||
(optional
|
||||
(any (f: (getAttrFromPath f config) != "_mkMergedOptionModule") from)
|
||||
(mergeFn config)));
|
||||
};
|
||||
|
||||
/* Single "from" version of mkMergedOptionModule.
|
||||
Return a module that causes a warning to be shown if the "from" option is
|
||||
defined; the defined value can be used in the "mergeFn" to set the "to"
|
||||
value.
|
||||
This function can be used to change an option into another that has a
|
||||
different type.
|
||||
|
||||
"mergeFn" takes the module "config" as a parameter and must return a value of
|
||||
"to" option type.
|
||||
|
||||
mkChangedOptionModule [ "a" "b" "c" ] [ "x" "y" "z" ]
|
||||
(config:
|
||||
let value = getAttrFromPath [ "a" "b" "c" ] config;
|
||||
in
|
||||
if value > 100 then "high"
|
||||
else "normal")
|
||||
|
||||
- options.a.b.c is a removed int option
|
||||
- options.x.y.z is a new str option that supersedes a.b.c
|
||||
|
||||
This shows a warning if a.b.c is set, and sets the value of x.y.z to the
|
||||
result of the change function
|
||||
*/
|
||||
mkChangedOptionModule = from: to: changeFn:
|
||||
mkMergedOptionModule [ from ] to changeFn;
|
||||
|
||||
/* Like ‘mkRenamedOptionModule’, but doesn't show a warning. */
|
||||
mkAliasOptionModule = from: to: doRename {
|
||||
inherit from to;
|
||||
visible = true;
|
||||
warn = false;
|
||||
use = id;
|
||||
};
|
||||
|
||||
doRename = { from, to, visible, warn, use }:
|
||||
let
|
||||
toOf = attrByPath to
|
||||
(abort "Renaming error: option `${showOption to}' does not exists.");
|
||||
in
|
||||
{ config, options, ... }:
|
||||
{ options = setAttrByPath from (mkOption {
|
||||
description = "Alias of <option>${showOption to}</option>.";
|
||||
apply = x: use (toOf config);
|
||||
});
|
||||
config = {
|
||||
warnings =
|
||||
let opt = getAttrFromPath from options; in
|
||||
optional (warn && opt.isDefined)
|
||||
"The option `${showOption from}' defined in ${showFiles opt.files} has been renamed to `${showOption to}'.";
|
||||
} // setAttrByPath to (mkAliasDefinitions (getAttrFromPath from options));
|
||||
};
|
||||
|
||||
}
|
||||
|
||||
@@ -4,6 +4,7 @@ let lib = import ./default.nix; in
|
||||
|
||||
with import ./trivial.nix;
|
||||
with import ./lists.nix;
|
||||
with import ./misc.nix;
|
||||
with import ./attrsets.nix;
|
||||
with import ./strings.nix;
|
||||
|
||||
@@ -19,7 +20,6 @@ rec {
|
||||
, apply ? null # Function that converts the option value to something else.
|
||||
, internal ? null # Whether the option is for NixOS developers only.
|
||||
, visible ? null # Whether the option shows up in the manual.
|
||||
, readOnly ? null # Whether the option can be set only once
|
||||
, options ? null # Obsolete, used by types.optionSet.
|
||||
} @ attrs:
|
||||
attrs // { _type = "option"; };
|
||||
@@ -53,27 +53,32 @@ rec {
|
||||
if length list == 1 then head list
|
||||
else if all isFunction list then x: mergeDefaultOption loc (map (f: f x) list)
|
||||
else if all isList list then concatLists list
|
||||
else if all isAttrs list then foldl' lib.mergeAttrs {} list
|
||||
else if all isBool list then foldl' lib.or false list
|
||||
else if all isAttrs list then fold lib.mergeAttrs {} list
|
||||
else if all isBool list then fold lib.or false list
|
||||
else if all isString list then lib.concatStrings list
|
||||
else if all isInt list && all (x: x == head list) list then head list
|
||||
else throw "Cannot merge definitions of `${showOption loc}' given in ${showFiles (getFiles defs)}.";
|
||||
|
||||
/* Obsolete, will remove soon. Specify an option type or apply
|
||||
function instead. */
|
||||
mergeTypedOption = typeName: predicate: merge: loc: list:
|
||||
let list' = map (x: x.value) list; in
|
||||
if all predicate list then merge list'
|
||||
else throw "Expected a ${typeName}.";
|
||||
|
||||
mergeEnableOption = mergeTypedOption "boolean"
|
||||
(x: true == x || false == x) (fold lib.or false);
|
||||
|
||||
mergeListOption = mergeTypedOption "list" isList concatLists;
|
||||
|
||||
mergeStringOption = mergeTypedOption "string" isString lib.concatStrings;
|
||||
|
||||
mergeOneOption = loc: defs:
|
||||
if defs == [] then abort "This case should never happen."
|
||||
else if length defs != 1 then
|
||||
throw "The unique option `${showOption loc}' is defined multiple times, in ${showFiles (getFiles defs)}."
|
||||
else (head defs).value;
|
||||
|
||||
/* "Merge" option definitions by checking that they all have the same value. */
|
||||
mergeEqualOption = loc: defs:
|
||||
if defs == [] then abort "This case should never happen."
|
||||
else foldl' (val: def:
|
||||
if def.value != val then
|
||||
throw "The option `${showOption loc}' has conflicting definitions, in ${showFiles (getFiles defs)}."
|
||||
else
|
||||
val) (head defs).value defs;
|
||||
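# Illustrative sketch (the option path and values are assumed):
# mergeEqualOption returns the shared value when all definitions agree, e.g.
#
#   mergeEqualOption [ "networking" "hostName" ]
#     [ { file = "a.nix"; value = "gamma"; } { file = "b.nix"; value = "gamma"; } ]
#   => "gamma"
#
# while differing values raise the "conflicting definitions" error above.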
|
||||
getValues = map (x: x.value);
|
||||
getFiles = map (x: x.file);
|
||||
|
||||
@@ -83,7 +88,7 @@ rec {
|
||||
optionAttrSetToDocList = optionAttrSetToDocList' [];
|
||||
|
||||
optionAttrSetToDocList' = prefix: options:
|
||||
concatMap (opt:
|
||||
fold (opt: rest:
|
||||
let
|
||||
docOption = rec {
|
||||
name = showOption opt.loc;
|
||||
@@ -91,8 +96,6 @@ rec {
|
||||
declarations = filter (x: x != unknownModule) opt.declarations;
|
||||
internal = opt.internal or false;
|
||||
visible = opt.visible or true;
|
||||
readOnly = opt.readOnly or false;
|
||||
type = opt.type.description or null;
|
||||
}
|
||||
// (if opt ? example then { example = scrubOptionValue opt.example; } else {})
|
||||
// (if opt ? default then { default = scrubOptionValue opt.default; } else {})
|
||||
@@ -102,7 +105,8 @@ rec {
|
||||
let ss = opt.type.getSubOptions opt.loc;
|
||||
in if ss != {} then optionAttrSetToDocList' opt.loc ss else [];
|
||||
in
|
||||
[ docOption ] ++ subOptions) (collect isOption options);
|
||||
# FIXME: expensive, O(n^2)
|
||||
[ docOption ] ++ subOptions ++ rest) [] (collect isOption options);
|
||||
|
||||
|
||||
/* This function recursively removes all derivation attributes from
|
||||
|
||||
@@ -1,24 +1,16 @@
let lists = import ./lists.nix; in

rec {
  all = linux ++ darwin ++ cygwin ++ freebsd ++ openbsd ++ netbsd ++ illumos;
  allBut = platforms: lists.filter (x: !(builtins.elem x platforms)) all;
  none = [];

  arm = ["armv5tel-linux" "armv6l-linux" "armv7l-linux" ];
  i686 = ["i686-linux" "i686-freebsd" "i686-netbsd" "i686-cygwin"];
  mips = [ "mips64el-linux" ];
  x86_64 = ["x86_64-linux" "x86_64-darwin" "x86_64-freebsd" "x86_64-openbsd" "x86_64-netbsd" "x86_64-cygwin"];

  cygwin = ["i686-cygwin" "x86_64-cygwin"];
  gnu = linux; /* ++ hurd ++ kfreebsd ++ ... */
  linux = ["i686-linux" "x86_64-linux" "armv5tel-linux" "armv6l-linux" "armv7l-linux" "mips64el-linux"];
  darwin = ["x86_64-darwin"];
  freebsd = ["i686-freebsd" "x86_64-freebsd"];
  gnu = linux; /* ++ hurd ++ kfreebsd ++ ... */
  illumos = ["x86_64-solaris"];
  linux = ["i686-linux" "x86_64-linux" "armv5tel-linux" "armv6l-linux" "armv7l-linux" "mips64el-linux"];
  netbsd = ["i686-netbsd" "x86_64-netbsd"];
  openbsd = ["i686-openbsd" "x86_64-openbsd"];
  unix = linux ++ darwin ++ freebsd ++ openbsd ++ netbsd ++ illumos;

  mesaPlatforms = ["i686-linux" "x86_64-linux" "x86_64-darwin" "armv5tel-linux" "armv6l-linux" "armv7l-linux"];
  netbsd = ["i686-netbsd" "x86_64-netbsd"];
  cygwin = ["i686-cygwin" "x86_64-cygwin"];
  unix = linux ++ darwin ++ freebsd ++ openbsd;
  all = linux ++ darwin ++ cygwin ++ freebsd ++ openbsd;
  none = [];
  allBut = platforms: lists.filter (x: !(builtins.elem x platforms)) all;
  mesaPlatforms = ["i686-linux" "x86_64-linux" "x86_64-darwin" "armv5tel-linux" "armv6l-linux"];
}
|
||||
|
||||
@@ -1,47 +0,0 @@
|
||||
with import ./strings.nix;
|
||||
|
||||
/* Helpers for creating lisp S-exprs for the Apple sandbox
|
||||
|
||||
lib.sandbox.allowFileRead [ "/usr/bin/file" ];
|
||||
# => "(allow file-read* (literal \"/usr/bin/file\"))";
|
||||
|
||||
lib.sandbox.allowFileRead {
|
||||
literal = [ "/usr/bin/file" ];
|
||||
subpath = [ "/usr/lib/system" ];
|
||||
}
|
||||
# => "(allow file-read* (literal \"/usr/bin/file\") (subpath \"/usr/lib/system\"))"
|
||||
*/
|
||||
|
||||
let
|
||||
|
||||
sexp = tokens: "(" + builtins.concatStringsSep " " tokens + ")";
|
||||
generateFileList = files:
|
||||
if builtins.isList files
|
||||
then concatMapStringsSep " " (x: sexp [ "literal" ''"${x}"'' ]) files
|
||||
else if builtins.isString files
|
||||
then generateFileList [ files ]
|
||||
else concatStringsSep " " (
|
||||
(map (x: sexp [ "literal" ''"${x}"'' ]) (files.literal or [])) ++
|
||||
(map (x: sexp [ "subpath" ''"${x}"'' ]) (files.subpath or []))
|
||||
);
|
||||
applyToFiles = f: act: files: f "${act} ${generateFileList files}";
|
||||
genActions = actionName: let
|
||||
action = feature: sexp [ actionName feature ];
|
||||
self = {
|
||||
"${actionName}" = action;
|
||||
"${actionName}File" = applyToFiles action "file*";
|
||||
"${actionName}FileRead" = applyToFiles action "file-read*";
|
||||
"${actionName}FileReadMetadata" = applyToFiles action "file-read-metadata";
|
||||
"${actionName}DirectoryList" = self."${actionName}FileReadMetadata";
|
||||
"${actionName}FileWrite" = applyToFiles action "file-write*";
|
||||
"${actionName}FileWriteMetadata" = applyToFiles action "file-write-metadata";
|
||||
};
|
||||
in self;
|
||||
|
||||
in
|
||||
|
||||
genActions "allow" // genActions "deny" // {
|
||||
importProfile = derivation: ''
|
||||
(import "${derivation}")
|
||||
'';
|
||||
}
|
||||
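# A further example in the spirit of the header comment (the path is
# invented); the generated deny* helpers mirror the allow* ones:
#
#   lib.sandbox.denyFileWrite [ "/etc" ]
#   # => "(deny file-write* (literal \"/etc\"))"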
@@ -4,27 +4,20 @@ let lib = import ./default.nix; in
|
||||
|
||||
rec {
|
||||
|
||||
# Returns the type of a path: regular (for file), symlink, or directory
|
||||
pathType = p: with builtins; getAttr (baseNameOf p) (readDir (dirOf p));
|
||||
|
||||
# Returns true if the path exists and is a directory, false otherwise
|
||||
pathIsDirectory = p: if builtins.pathExists p then (pathType p) == "directory" else false;
|
||||
|
||||
# Bring in a path as a source, filtering out all Subversion and CVS
|
||||
# directories, as well as backup files (*~).
|
||||
cleanSourceFilter = name: type: let baseName = baseNameOf (toString name); in ! (
|
||||
# Filter out Subversion and CVS directories.
|
||||
(type == "directory" && (baseName == ".git" || baseName == ".svn" || baseName == "CVS" || baseName == ".hg")) ||
|
||||
# Filter out backup files.
|
||||
lib.hasSuffix "~" baseName ||
|
||||
# Filter out generated files.
|
||||
lib.hasSuffix ".o" baseName ||
|
||||
lib.hasSuffix ".so" baseName ||
|
||||
# Filter out nix-build result symlinks
|
||||
(type == "symlink" && lib.hasPrefix "result" baseName)
|
||||
);
|
||||
|
||||
cleanSource = builtins.filterSource cleanSourceFilter;
|
||||
cleanSource =
|
||||
let filter = name: type: let baseName = baseNameOf (toString name); in ! (
|
||||
# Filter out Subversion and CVS directories.
|
||||
(type == "directory" && (baseName == ".git" || baseName == ".svn" || baseName == "CVS" || baseName == ".hg")) ||
|
||||
# Filter out backup files.
|
||||
lib.hasSuffix "~" baseName ||
|
||||
# Filter out generated files.
|
||||
lib.hasSuffix ".o" baseName ||
|
||||
lib.hasSuffix ".so" baseName
|
||||
);
|
||||
in src: builtins.filterSource filter src;
|
||||
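# A hedged usage sketch (derivation attributes assumed): cleanSource is
# usually applied to a local source tree so VCS metadata, editor backups and
# build leftovers do not end up in the store or perturb the source hash:
#
#   stdenv.mkDerivation {
#     name = "example-0.1";
#     src = lib.cleanSource ./.;
#   }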
|
||||
|
||||
# Get all files ending with the specified suffices from the given
|
||||
@@ -36,32 +29,4 @@ rec {
|
||||
in type == "directory" || lib.any (ext: lib.hasSuffix ext base) exts;
|
||||
in builtins.filterSource filter path;
|
||||
|
||||
|
||||
# Get the commit id of a git repo
|
||||
# Example: commitIdFromGitRepo <nixpkgs/.git>
|
||||
commitIdFromGitRepo =
|
||||
let readCommitFromFile = path: file:
|
||||
with builtins;
|
||||
let fileName = toString path + "/" + file;
|
||||
packedRefsName = toString path + "/packed-refs";
|
||||
in if lib.pathExists fileName
|
||||
then
|
||||
let fileContent = lib.fileContents fileName;
|
||||
# Sometimes git stores the commitId directly in the file but
|
||||
# sometimes it stores something like: «ref: refs/heads/branch-name»
|
||||
matchRef = match "^ref: (.*)$" fileContent;
|
||||
in if isNull matchRef
|
||||
then fileContent
|
||||
else readCommitFromFile path (lib.head matchRef)
|
||||
# Sometimes, the file isn't there at all and has been packed away in the
|
||||
# packed-refs file, so we have to grep through it:
|
||||
else if lib.pathExists packedRefsName
|
||||
then
|
||||
let fileContent = readFile packedRefsName;
|
||||
matchRef = match (".*\n([^\n ]*) " + file + "\n.*") fileContent;
|
||||
in if isNull matchRef
|
||||
then throw ("Could not find " + file + " in " + packedRefsName)
|
||||
else lib.head matchRef
|
||||
else throw ("Not a .git directory: " + path);
|
||||
in lib.flip readCommitFromFile "HEAD";
|
||||
}
|
||||
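# Hedged sketch of a typical use of commitIdFromGitRepo (the attribute and
# prefix are assumptions): deriving a short revision suffix from the checkout
# the expression lives in.
#
#   version = "git-" + lib.substring 0 7 (lib.commitIdFromGitRepo ./.git);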
|
||||
@@ -15,7 +15,7 @@ Usage:
|
||||
Attention:
|
||||
|
||||
let
|
||||
pkgs = (import <nixpkgs>) {};
|
||||
pkgs = (import /etc/nixos/nixpkgs/pkgs/top-level/all-packages.nix) {};
|
||||
in let
|
||||
inherit (pkgs.stringsWithDeps) fullDepEntry packEntry noDepEntry textClosureMap;
|
||||
inherit (pkgs.lib) id;
|
||||
|
||||
lib/strings.nix
@@ -8,277 +8,131 @@ in
|
||||
|
||||
rec {
|
||||
|
||||
inherit (builtins) stringLength substring head tail isString replaceStrings;
|
||||
inherit (builtins) stringLength substring head tail isString;
|
||||
|
||||
/* Concatenate a list of strings.
|
||||
|
||||
Example:
|
||||
concatStrings ["foo" "bar"]
|
||||
=> "foobar"
|
||||
*/
|
||||
concatStrings = builtins.concatStringsSep "";
|
||||
# Concatenate a list of strings.
|
||||
concatStrings = lib.fold (x: y: x + y) "";
|
||||
|
||||
/* Map a function over a list and concatenate the resulting strings.
|
||||
|
||||
Example:
|
||||
concatMapStrings (x: "a" + x) ["foo" "bar"]
|
||||
=> "afooabar"
|
||||
*/
|
||||
# Map a function over a list and concatenate the resulting strings.
|
||||
concatMapStrings = f: list: concatStrings (map f list);
|
||||
|
||||
/* Like `concatMapStrings' except that the f functions also gets the
|
||||
position as a parameter.
|
||||
|
||||
Example:
|
||||
concatImapStrings (pos: x: "${toString pos}-${x}") ["foo" "bar"]
|
||||
=> "1-foo2-bar"
|
||||
*/
|
||||
concatImapStrings = f: list: concatStrings (lib.imap f list);
|
||||
|
||||
/* Place an element between each element of a list
|
||||
|
||||
Example:
|
||||
intersperse "/" ["usr" "local" "bin"]
|
||||
=> ["usr" "/" "local" "/" "bin"].
|
||||
*/
|
||||
# Place an element between each element of a list, e.g.,
|
||||
# `intersperse "," ["a" "b" "c"]' returns ["a" "," "b" "," "c"].
|
||||
intersperse = separator: list:
|
||||
if list == [] || length list == 1
|
||||
then list
|
||||
else tail (lib.concatMap (x: [separator x]) list);
|
||||
else [(head list) separator]
|
||||
++ (intersperse separator (tail list));
|
||||
|
||||
/* Concatenate a list of strings with a separator between each element
|
||||
|
||||
Example:
|
||||
concatStringsSep "/" ["usr" "local" "bin"]
|
||||
=> "usr/local/bin"
|
||||
*/
|
||||
concatStringsSep = builtins.concatStringsSep or (separator: list:
|
||||
concatStrings (intersperse separator list));
|
||||
# Concatenate a list of strings with a separator between each element, e.g.
|
||||
# concatStringsSep " " ["foo" "bar" "xyzzy"] == "foo bar xyzzy"
|
||||
concatStringsSep = separator: list:
|
||||
concatStrings (intersperse separator list);
|
||||
|
||||
/* First maps over the list and then concatenates it.
|
||||
|
||||
Example:
|
||||
concatMapStringsSep "-" (x: toUpper x) ["foo" "bar" "baz"]
|
||||
=> "FOO-BAR-BAZ"
|
||||
*/
|
||||
concatMapStringsSep = sep: f: list: concatStringsSep sep (map f list);
|
||||
|
||||
/* First imaps over the list and then concatenates it.
|
||||
|
||||
Example:
|
||||
|
||||
concatImapStringsSep "-" (pos: x: toString (x / pos)) [ 6 6 6 ]
|
||||
=> "6-3-2"
|
||||
*/
|
||||
concatImapStringsSep = sep: f: list: concatStringsSep sep (lib.imap f list);
|
||||
|
||||
/* Construct a Unix-style search path consisting of each `subDir"
|
||||
directory of the given list of packages.
|
||||
|
||||
Example:
|
||||
makeSearchPath "bin" ["/root" "/usr" "/usr/local"]
|
||||
=> "/root/bin:/usr/bin:/usr/local/bin"
|
||||
makeSearchPath "bin" ["/"]
|
||||
=> "//bin"
|
||||
*/
|
||||
# Construct a Unix-style search path consisting of each `subDir"
|
||||
# directory of the given list of packages. For example,
|
||||
# `makeSearchPath "bin" ["x" "y" "z"]' returns "x/bin:y/bin:z/bin".
|
||||
makeSearchPath = subDir: packages:
|
||||
concatStringsSep ":" (map (path: path + "/" + subDir) packages);
|
||||
|
||||
/* Construct a Unix-style search path, using given package output.
|
||||
If no output is found, fallback to `.out` and then to the default.
|
||||
|
||||
Example:
|
||||
makeSearchPathOutput "dev" "bin" [ pkgs.openssl pkgs.zlib ]
|
||||
=> "/nix/store/9rz8gxhzf8sw4kf2j2f1grr49w8zx5vj-openssl-1.0.1r-dev/bin:/nix/store/wwh7mhwh269sfjkm6k5665b5kgp7jrk2-zlib-1.2.8/bin"
|
||||
*/
|
||||
makeSearchPathOutput = output: subDir: pkgs: makeSearchPath subDir (map (lib.getOutput output) pkgs);
|
||||
|
||||
/* Construct a library search path (such as RPATH) containing the
|
||||
libraries for a set of packages
|
||||
|
||||
Example:
|
||||
makeLibraryPath [ "/usr" "/usr/local" ]
|
||||
=> "/usr/lib:/usr/local/lib"
|
||||
pkgs = import <nixpkgs> { }
|
||||
makeLibraryPath [ pkgs.openssl pkgs.zlib ]
|
||||
=> "/nix/store/9rz8gxhzf8sw4kf2j2f1grr49w8zx5vj-openssl-1.0.1r/lib:/nix/store/wwh7mhwh269sfjkm6k5665b5kgp7jrk2-zlib-1.2.8/lib"
|
||||
*/
|
||||
makeLibraryPath = makeSearchPathOutput "lib" "lib";
|
||||
|
||||
/* Construct a binary search path (such as $PATH) containing the
|
||||
binaries for a set of packages.
|
||||
|
||||
Example:
|
||||
makeBinPath ["/root" "/usr" "/usr/local"]
|
||||
=> "/root/bin:/usr/bin:/usr/local/bin"
|
||||
*/
|
||||
makeBinPath = makeSearchPathOutput "bin" "bin";
|
||||
# Construct a library search path (such as RPATH) containing the
|
||||
# libraries for a set of packages, e.g. "${pkg1}/lib:${pkg2}/lib:...".
|
||||
makeLibraryPath = makeSearchPath "lib";
|
||||
|
||||
|
||||
/* Construct a perl search path (such as $PERL5LIB)
|
||||
# Idem for Perl search paths.
|
||||
makePerlPath = makeSearchPath "lib/perl5/site_perl";
|
||||
|
||||
FIXME(zimbatm): this should be moved in perl-specific code
|
||||
|
||||
Example:
|
||||
pkgs = import <nixpkgs> { }
|
||||
makePerlPath [ pkgs.perlPackages.NetSMTP ]
|
||||
=> "/nix/store/n0m1fk9c960d8wlrs62sncnadygqqc6y-perl-Net-SMTP-1.25/lib/perl5/site_perl"
|
||||
*/
|
||||
makePerlPath = makeSearchPathOutput "lib" "lib/perl5/site_perl";
|
||||
|
||||
/* Depending on the boolean `cond', return either the given string
or the empty string. Useful to concatenate against a bigger string.
|
||||
|
||||
Example:
|
||||
optionalString true "some-string"
|
||||
=> "some-string"
|
||||
optionalString false "some-string"
|
||||
=> ""
|
||||
*/
|
||||
# Depending on the boolean `cond', return either the given string
# or the empty string.
|
||||
optionalString = cond: string: if cond then string else "";
|
||||
|
||||
/* Determine whether a string has given prefix.
|
||||
|
||||
Example:
|
||||
hasPrefix "foo" "foobar"
|
||||
=> true
|
||||
hasPrefix "foo" "barfoo"
|
||||
=> false
|
||||
*/
|
||||
# Determine whether a string has given prefix/suffix.
|
||||
hasPrefix = pref: str:
|
||||
substring 0 (stringLength pref) str == pref;
|
||||
|
||||
/* Determine whether a string has given suffix.
|
||||
|
||||
Example:
|
||||
hasSuffix "foo" "foobar"
|
||||
=> false
|
||||
hasSuffix "foo" "barfoo"
|
||||
=> true
|
||||
*/
|
||||
hasSuffix = suffix: content:
|
||||
eqStrings (substring 0 (stringLength pref) str) pref;
|
||||
hasSuffix = suff: str:
|
||||
let
|
||||
lenContent = stringLength content;
|
||||
lenSuffix = stringLength suffix;
|
||||
in lenContent >= lenSuffix &&
|
||||
substring (lenContent - lenSuffix) lenContent content == suffix;
|
||||
lenStr = stringLength str;
|
||||
lenSuff = stringLength suff;
|
||||
in lenStr >= lenSuff &&
|
||||
eqStrings (substring (lenStr - lenSuff) lenStr str) suff;
|
||||
|
||||
/* Convert a string to a list of characters (i.e. singleton strings).
|
||||
This allows you to, e.g., map a function over each character. However,
|
||||
note that this will likely be horribly inefficient; Nix is not a
|
||||
general purpose programming language. Complex string manipulations
|
||||
should, if appropriate, be done in a derivation.
|
||||
Also note that Nix treats strings as a list of bytes and thus doesn't
|
||||
handle unicode.
|
||||
|
||||
Example:
|
||||
stringToCharacters ""
|
||||
=> [ ]
|
||||
stringToCharacters "abc"
|
||||
=> [ "a" "b" "c" ]
|
||||
stringToCharacters "💩"
|
||||
=> [ "<EFBFBD>" "<EFBFBD>" "<EFBFBD>" "<EFBFBD>" ]
|
||||
*/
|
||||
stringToCharacters = s:
|
||||
map (p: substring p 1 s) (lib.range 0 (stringLength s - 1));
|
||||
# Convert a string to a list of characters (i.e. singleton strings).
|
||||
# For instance, "abc" becomes ["a" "b" "c"]. This allows you to,
|
||||
# e.g., map a function over each character. However, note that this
|
||||
# will likely be horribly inefficient; Nix is not a general purpose
|
||||
# programming language. Complex string manipulations should, if
|
||||
# appropriate, be done in a derivation.
|
||||
stringToCharacters = s: let l = stringLength s; in
|
||||
if l == 0
|
||||
then []
|
||||
else map (p: substring p 1 s) (lib.range 0 (l - 1));
|
||||
|
||||
/* Manipulate a string character by character and replace them by
|
||||
strings before concatenating the results.
|
||||
|
||||
Example:
|
||||
stringAsChars (x: if x == "a" then "i" else x) "nax"
|
||||
=> "nix"
|
||||
*/
|
||||
# Manipulate a string character by character and replace them by strings
|
||||
# before concatenating the results.
|
||||
stringAsChars = f: s:
|
||||
concatStrings (
|
||||
map f (stringToCharacters s)
|
||||
);
|
||||
|
||||
/* Escape occurrence of the elements of ‘list’ in ‘string’ by
|
||||
prefixing it with a backslash.
|
||||
|
||||
Example:
|
||||
escape ["(" ")"] "(foo)"
|
||||
=> "\\(foo\\)"
|
||||
*/
|
||||
escape = list: replaceChars list (map (c: "\\${c}") list);
|
||||
# same as vim escape function.
|
||||
# Each character contained in list is prefixed by "\"
|
||||
escape = list : string :
|
||||
stringAsChars (c: if lib.elem c list then "\\${c}" else c) string;
|
||||
|
||||
/* Quote string to be used safely within the Bourne shell.
|
||||
|
||||
Example:
|
||||
escapeShellArg "esc'ape\nme"
|
||||
=> "'esc'\\''ape\nme'"
|
||||
*/
|
||||
escapeShellArg = arg: "'${replaceStrings ["'"] ["'\\''"] (toString arg)}'";
|
||||
# still ugly and slow, but more correct now
|
||||
# [] for zsh
|
||||
escapeShellArg = lib.escape (stringToCharacters "\\ ';$`()|<>\t*[]");
|
||||
|
||||
/* Quote all arguments to be safely passed to the Bourne shell.
|
||||
|
||||
Example:
|
||||
escapeShellArgs ["one" "two three" "four'five"]
|
||||
=> "'one' 'two three' 'four'\\''five'"
|
||||
*/
|
||||
escapeShellArgs = concatMapStringsSep " " escapeShellArg;
|
||||
|
||||
/* Obsolete - use replaceStrings instead. */
|
||||
replaceChars = builtins.replaceStrings or (
|
||||
del: new: s:
|
||||
# replace characters by their substitutes. This function is equivalent to
|
||||
# the `tr' command except that one character can be replace by multiple
|
||||
# ones. e.g.,
|
||||
# replaceChars ["<" ">"] ["<" ">"] "<foo>" returns "<foo>".
|
||||
replaceChars = del: new: s:
|
||||
let
|
||||
substList = lib.zipLists del new;
|
||||
subst = c:
|
||||
let found = lib.findFirst (sub: sub.fst == c) null substList; in
|
||||
if found == null then
|
||||
c
|
||||
else
|
||||
found.snd;
|
||||
(lib.fold
|
||||
(sub: res: if sub.fst == c then sub else res)
|
||||
{fst = c; snd = c;} (lib.zipLists del new)
|
||||
).snd;
|
||||
in
|
||||
stringAsChars subst s);
|
||||
stringAsChars subst s;
|
||||
|
||||
# Case conversion utilities.
|
||||
|
||||
# Case conversion utilities
|
||||
lowerChars = stringToCharacters "abcdefghijklmnopqrstuvwxyz";
|
||||
upperChars = stringToCharacters "ABCDEFGHIJKLMNOPQRSTUVWXYZ";
|
||||
|
||||
/* Converts an ASCII string to lower-case.
|
||||
|
||||
Example:
|
||||
toLower "HOME"
|
||||
=> "home"
|
||||
*/
|
||||
toLower = replaceChars upperChars lowerChars;
|
||||
|
||||
/* Converts an ASCII string to upper-case.
|
||||
|
||||
Example:
|
||||
toUpper "home"
|
||||
=> "HOME"
|
||||
*/
|
||||
toUpper = replaceChars lowerChars upperChars;
|
||||
|
||||
/* Appends string context from another string. This is an implementation
|
||||
detail of Nix.
|
||||
|
||||
Strings in Nix carry an invisible `context' which is a list of strings
|
||||
representing store paths. If the string is later used in a derivation
|
||||
attribute, the derivation will properly populate the inputDrvs and
|
||||
inputSrcs.
|
||||
|
||||
Example:
|
||||
pkgs = import <nixpkgs> { };
|
||||
addContextFrom pkgs.coreutils "bar"
|
||||
=> "bar"
|
||||
*/
|
||||
# Appends string context from another string
|
||||
addContextFrom = a: b: substring 0 0 a + b;
|
||||
|
||||
/* Cut a string with a separator and produces a list of strings which
|
||||
were separated by this separator.
|
||||
# Compares strings not requiring context equality
|
||||
# Obviously, a workaround but works on all Nix versions
|
||||
eqStrings = a: b: addContextFrom b a == addContextFrom a b;
|
||||
|
||||
NOTE: this function is not performant and should never be used.
|
||||
|
||||
Example:
|
||||
splitString "." "foo.bar.baz"
|
||||
=> [ "foo" "bar" "baz" ]
|
||||
splitString "/" "/usr/local/bin"
|
||||
=> [ "" "usr" "local" "bin" ]
|
||||
*/
|
||||
# Cut a string with a separator and produces a list of strings which were
|
||||
# separated by this separator. e.g.,
|
||||
# `splitString "." "foo.bar.baz"' returns ["foo" "bar" "baz"].
|
||||
splitString = _sep: _s:
|
||||
let
|
||||
sep = addContextFrom _s _sep;
|
||||
@@ -302,15 +156,10 @@ rec {
|
||||
in
|
||||
recurse 0 0;
|
||||
|
||||
/* Return the suffix of the second argument if the first argument matches
|
||||
its prefix.
|
||||
|
||||
Example:
|
||||
removePrefix "foo." "foo.bar.baz"
|
||||
=> "bar.baz"
|
||||
removePrefix "xxx" "foo.bar.baz"
|
||||
=> "foo.bar.baz"
|
||||
*/
|
||||
# return the suffix of the second argument if the first argument matches its
|
||||
# prefix. e.g.,
|
||||
# `removePrefix "foo." "foo.bar.baz"' returns "bar.baz".
|
||||
removePrefix = pre: s:
|
||||
let
|
||||
preLen = stringLength pre;
|
||||
@@ -321,177 +170,42 @@ rec {
|
||||
else
|
||||
s;
|
||||
|
||||
/* Return the prefix of the second argument if the first argument matches
|
||||
its suffix.
|
||||
|
||||
Example:
|
||||
removeSuffix "front" "homefront"
|
||||
=> "home"
|
||||
removeSuffix "xxx" "homefront"
|
||||
=> "homefront"
|
||||
*/
|
||||
removeSuffix = suf: s:
|
||||
let
|
||||
sufLen = stringLength suf;
|
||||
sLen = stringLength s;
|
||||
in
|
||||
if sufLen <= sLen && suf == substring (sLen - sufLen) sufLen s then
|
||||
if sufLen <= sLen && eqStrings suf (substring (sLen - sufLen) sufLen s) then
|
||||
substring 0 (sLen - sufLen) s
|
||||
else
|
||||
s;
|
||||
|
||||
/* Return true iff string v1 denotes a version older than v2.
|
||||
|
||||
Example:
|
||||
versionOlder "1.1" "1.2"
|
||||
=> true
|
||||
versionOlder "1.1" "1.1"
|
||||
=> false
|
||||
*/
|
||||
# Return true iff string v1 denotes a version older than v2.
|
||||
versionOlder = v1: v2: builtins.compareVersions v2 v1 == 1;
|
||||
|
||||
/* Return true iff string v1 denotes a version equal to or newer than v2.
|
||||
|
||||
Example:
|
||||
versionAtLeast "1.1" "1.0"
|
||||
=> true
|
||||
versionAtLeast "1.1" "1.1"
|
||||
=> true
|
||||
versionAtLeast "1.1" "1.2"
|
||||
=> false
|
||||
*/
|
||||
# Return true iff string v1 denotes a version equal to or newer than v2.
|
||||
versionAtLeast = v1: v2: !versionOlder v1 v2;
|
||||
|
||||
/* This function takes an argument that's either a derivation or a
|
||||
derivation's "name" attribute and extracts the version part from that
|
||||
argument.
|
||||
|
||||
Example:
|
||||
getVersion "youtube-dl-2016.01.01"
|
||||
=> "2016.01.01"
|
||||
getVersion pkgs.youtube-dl
|
||||
=> "2016.01.01"
|
||||
*/
|
||||
getVersion = x:
|
||||
let
|
||||
parse = drv: (builtins.parseDrvName drv).version;
|
||||
in if isString x
|
||||
then parse x
|
||||
else x.version or (parse x.name);
|
||||
# Get the version of the specified derivation, as specified in its
|
||||
# ‘name’ attribute.
|
||||
getVersion = drv: (builtins.parseDrvName drv.name).version;
|
||||
|
||||
/* Extract the name with version from a URL. Takes a separator, which is
expected to start the extension.
|
||||
|
||||
Example:
|
||||
nameFromURL "https://nixos.org/releases/nix/nix-1.7/nix-1.7-x86_64-linux.tar.bz2" "-"
|
||||
=> "nix"
|
||||
nameFromURL "https://nixos.org/releases/nix/nix-1.7/nix-1.7-x86_64-linux.tar.bz2" "_"
|
||||
=> "nix-1.7-x86"
|
||||
*/
|
||||
nameFromURL = url: sep:
|
||||
let
|
||||
components = splitString "/" url;
|
||||
filename = lib.last components;
|
||||
name = builtins.head (splitString sep filename);
|
||||
in assert name != filename; name;
|
||||
# Extract the name with version from a URL. Takes a separator, which is
# expected to start the extension.
|
||||
nameFromURL = url: sep: let
|
||||
components = splitString "/" url;
|
||||
filename = lib.last components;
|
||||
name = builtins.head (splitString sep filename);
|
||||
in
|
||||
assert ! eqStrings name filename;
|
||||
name;
|
||||
|
||||
/* Create an --{enable,disable}-<feat> string that can be passed to
|
||||
standard GNU Autoconf scripts.
|
||||
|
||||
Example:
|
||||
enableFeature true "shared"
|
||||
=> "--enable-shared"
|
||||
enableFeature false "shared"
|
||||
=> "--disable-shared"
|
||||
*/
|
||||
# Create an --{enable,disable}-<feat> string that can be passed to
|
||||
# standard GNU Autoconf scripts.
|
||||
enableFeature = enable: feat: "--${if enable then "enable" else "disable"}-${feat}";
|
||||
|
||||
/* Create a fixed width string with additional prefix to match
|
||||
required width.
|
||||
|
||||
Example:
|
||||
fixedWidthString 5 "0" (toString 15)
|
||||
=> "00015"
|
||||
*/
|
||||
fixedWidthString = width: filler: str:
|
||||
let
|
||||
strw = lib.stringLength str;
|
||||
reqWidth = width - (lib.stringLength filler);
|
||||
in
|
||||
assert strw <= width;
|
||||
if strw == width then str else filler + fixedWidthString reqWidth filler str;
|
||||
|
||||
/* Format a number adding leading zeroes up to fixed width.
|
||||
|
||||
Example:
|
||||
fixedWidthNumber 5 15
|
||||
=> "00015"
|
||||
*/
|
||||
fixedWidthNumber = width: n: fixedWidthString width "0" (toString n);
|
||||
|
||||
/* Check whether a value is a store path.
|
||||
|
||||
Example:
|
||||
isStorePath "/nix/store/d945ibfx9x185xf04b890y4f9g3cbb63-python-2.7.11/bin/python"
|
||||
=> false
|
||||
isStorePath "/nix/store/d945ibfx9x185xf04b890y4f9g3cbb63-python-2.7.11/"
|
||||
=> true
|
||||
isStorePath pkgs.python
|
||||
=> true
|
||||
*/
|
||||
isStorePath = x: builtins.substring 0 1 (toString x) == "/" && dirOf (builtins.toPath x) == builtins.storeDir;
|
||||
|
||||
/* Convert string to int
|
||||
Obviously, it is a bit hacky to use fromJSON that way.
|
||||
|
||||
Example:
|
||||
toInt "1337"
|
||||
=> 1337
|
||||
toInt "-4"
|
||||
=> -4
|
||||
toInt "3.14"
|
||||
=> error: floating point JSON numbers are not supported
|
||||
*/
|
||||
toInt = str:
|
||||
let may_be_int = builtins.fromJSON str; in
|
||||
if builtins.isInt may_be_int
|
||||
then may_be_int
|
||||
else throw "Could not convert ${str} to int.";
|
||||
|
||||
/* Read a list of paths from `file', relative to the `rootPath'. Lines
|
||||
beginning with `#' are treated as comments and ignored. Whitespace
|
||||
is significant.
|
||||
|
||||
NOTE: this function is not performant and should be avoided
|
||||
|
||||
Example:
|
||||
readPathsFromFile /prefix
|
||||
./pkgs/development/libraries/qt-5/5.4/qtbase/series
|
||||
=> [ "/prefix/dlopen-resolv.patch" "/prefix/tzdir.patch"
|
||||
"/prefix/dlopen-libXcursor.patch" "/prefix/dlopen-openssl.patch"
|
||||
"/prefix/dlopen-dbus.patch" "/prefix/xdg-config-dirs.patch"
|
||||
"/prefix/nix-profiles-library-paths.patch"
|
||||
"/prefix/compose-search-path.patch" ]
|
||||
*/
|
||||
readPathsFromFile = rootPath: file:
|
||||
let
|
||||
root = toString rootPath;
|
||||
lines =
|
||||
builtins.map (lib.removeSuffix "\n")
|
||||
(lib.splitString "\n" (builtins.readFile file));
|
||||
removeComments = lib.filter (line: !(lib.hasPrefix "#" line));
|
||||
relativePaths = removeComments lines;
|
||||
absolutePaths = builtins.map (path: builtins.toPath (root + "/" + path)) relativePaths;
|
||||
in
|
||||
absolutePaths;
|
||||
|
||||
/* Read the contents of a file removing the trailing \n
|
||||
|
||||
Example:
|
||||
$ echo "1.0" > ./version
|
||||
|
||||
fileContents ./version
|
||||
=> "1.0"
|
||||
*/
|
||||
fileContents = file: removeSuffix "\n" (builtins.readFile file);
|
||||
}
|
||||
|
||||
@@ -84,10 +84,10 @@ rec {
|
||||
};
|
||||
|
||||
|
||||
is64Bit = matchAttrs { cpu = { bits = 64; }; };
|
||||
isDarwin = matchAttrs { kernel = kernels.darwin; };
|
||||
isi686 = matchAttrs { cpu = cpuTypes.i686; };
|
||||
isLinux = matchAttrs { kernel = kernels.linux; };
|
||||
isi686 = matchAttrs { cpu = cpuTypes.i686; };
|
||||
is64Bit = matchAttrs { cpu = { bits = 64; }; };
|
||||
|
||||
|
||||
# This should revert the job done by config.guess from the gcc compiler.
|
||||
|
||||
lib/tests.nix
@@ -7,7 +7,7 @@ runTests {
|
||||
expr = id 1;
|
||||
expected = 1;
|
||||
};
|
||||
|
||||
|
||||
testConst = {
|
||||
expr = const 2 3;
|
||||
expected = 2;
|
||||
@@ -19,12 +19,12 @@ runTests {
|
||||
expected = true;
|
||||
};
|
||||
*/
|
||||
|
||||
|
||||
testAnd = {
|
||||
expr = and true false;
|
||||
expected = false;
|
||||
};
|
||||
|
||||
|
||||
testFix = {
|
||||
expr = fix (x: {a = if x ? a then "a" else "b";});
|
||||
expected = {a = "a";};
|
||||
@@ -67,7 +67,7 @@ runTests {
|
||||
};
|
||||
|
||||
testOverridableDelayableArgsTest = {
|
||||
expr =
|
||||
expr =
|
||||
let res1 = defaultOverridableDelayableArgs id {};
|
||||
res2 = defaultOverridableDelayableArgs id { a = 7; };
|
||||
res3 = let x = defaultOverridableDelayableArgs id { a = 7; };
|
||||
@@ -87,7 +87,7 @@ runTests {
|
||||
in (x2.replace) { a = 10; }; # and override the value by 10
|
||||
|
||||
# fixed tests (delayed args): (when using them add some comments, please)
|
||||
resFixed1 =
|
||||
resFixed1 =
|
||||
let x = defaultOverridableDelayableArgs id ( x : { a = 7; c = x.fixed.b; });
|
||||
y = x.merge (x : { name = "name-${builtins.toString x.fixed.c}"; });
|
||||
in (y.merge) { b = 10; };
|
||||
@@ -109,115 +109,5 @@ runTests {
|
||||
expr = sort builtins.lessThan [ 40 2 30 42 ];
|
||||
expected = [2 30 40 42];
|
||||
};
|
||||
|
||||
testToIntShouldConvertStringToInt = {
|
||||
expr = toInt "27";
|
||||
expected = 27;
|
||||
};
|
||||
|
||||
testToIntShouldThrowErrorIfItCouldNotConvertToInt = {
|
||||
expr = builtins.tryEval (toInt "\"foo\"");
|
||||
expected = { success = false; value = false; };
|
||||
};
|
||||
|
||||
testHasAttrByPathTrue = {
|
||||
expr = hasAttrByPath ["a" "b"] { a = { b = "yey"; }; };
|
||||
expected = true;
|
||||
};
|
||||
|
||||
testHasAttrByPathFalse = {
|
||||
expr = hasAttrByPath ["a" "b"] { a = { c = "yey"; }; };
|
||||
expected = false;
|
||||
};
|
||||
|
||||
|
||||
/* Generator tests */
|
||||
# these tests assume attributes are converted to lists
|
||||
# in alphabetical order
|
||||
|
||||
testMkKeyValueDefault = {
|
||||
expr = generators.mkKeyValueDefault ":" "f:oo" "bar";
|
||||
expected = ''f\:oo:bar'';
|
||||
};
|
||||
|
||||
testToKeyValue = {
|
||||
expr = generators.toKeyValue {} {
|
||||
key = "value";
|
||||
"other=key" = "baz";
|
||||
};
|
||||
expected = ''
|
||||
key=value
|
||||
other\=key=baz
|
||||
'';
|
||||
};
|
||||
|
||||
testToINIEmpty = {
|
||||
expr = generators.toINI {} {};
|
||||
expected = "";
|
||||
};
|
||||
|
||||
testToINIEmptySection = {
|
||||
expr = generators.toINI {} { foo = {}; bar = {}; };
|
||||
expected = ''
|
||||
[bar]
|
||||
|
||||
[foo]
|
||||
'';
|
||||
};
|
||||
|
||||
testToINIDefaultEscapes = {
|
||||
expr = generators.toINI {} {
|
||||
"no [ and ] allowed unescaped" = {
|
||||
"and also no = in keys" = 42;
|
||||
};
|
||||
};
|
||||
expected = ''
|
||||
[no \[ and \] allowed unescaped]
|
||||
and also no \= in keys=42
|
||||
'';
|
||||
};
|
||||
|
||||
testToINIDefaultFull = {
|
||||
expr = generators.toINI {} {
|
||||
"section 1" = {
|
||||
attribute1 = 5;
|
||||
x = "Me-se JarJar Binx";
|
||||
};
|
||||
"foo[]" = {
|
||||
"he\\h=he" = "this is okay";
|
||||
};
|
||||
};
|
||||
expected = ''
|
||||
[foo\[\]]
|
||||
he\h\=he=this is okay
|
||||
|
||||
[section 1]
|
||||
attribute1=5
|
||||
x=Me-se JarJar Binx
|
||||
'';
|
||||
};
|
||||
|
||||
/* right now only invocation check */
|
||||
testToJSONSimple =
|
||||
let val = {
|
||||
foobar = [ "baz" 1 2 3 ];
|
||||
};
|
||||
in {
|
||||
expr = generators.toJSON {} val;
|
||||
# trivial implementation
|
||||
expected = builtins.toJSON val;
|
||||
};
|
||||
|
||||
/* right now only invocation check */
|
||||
testToYAMLSimple =
|
||||
let val = {
|
||||
list = [ { one = 1; } { two = 2; } ];
|
||||
all = 42;
|
||||
};
|
||||
in {
|
||||
expr = generators.toYAML {} val;
|
||||
# trivial implementation
|
||||
expected = builtins.toJSON val;
|
||||
};
|
||||
|
||||
|
||||
}
|
||||
|
||||
@@ -1,127 +0,0 @@
|
||||
#!/bin/sh
|
||||
#
|
||||
# This script is used to test that the module system is working as expected.
|
||||
# By default it tests the version of nixpkgs that is defined in the NIX_PATH.
|
||||
|
||||
cd ./modules
|
||||
|
||||
pass=0
|
||||
fail=0
|
||||
|
||||
evalConfig() {
|
||||
local attr=$1
|
||||
shift;
|
||||
local script="import ./default.nix { modules = [ $@ ];}"
|
||||
nix-instantiate --timeout 1 -E "$script" -A "$attr" --eval-only --show-trace
|
||||
}
|
||||
|
||||
reportFailure() {
|
||||
local attr=$1
|
||||
shift;
|
||||
local script="import ./default.nix { modules = [ $@ ];}"
|
||||
echo 2>&1 "$ nix-instantiate -E '$script' -A '$attr' --eval-only"
|
||||
evalConfig "$attr" "$@"
|
||||
fail=$((fail + 1))
|
||||
}
|
||||
|
||||
checkConfigOutput() {
|
||||
local outputContains=$1
|
||||
shift;
|
||||
if evalConfig "$@" 2>/dev/null | grep --silent "$outputContains" ; then
|
||||
pass=$((pass + 1))
|
||||
return 0;
|
||||
else
|
||||
echo 2>&1 "error: Expected result matching '$outputContains', while evaluating"
|
||||
reportFailure "$@"
|
||||
return 1
|
||||
fi
|
||||
}
|
||||
|
||||
checkConfigError() {
|
||||
local errorContains=$1
|
||||
local err=""
|
||||
shift;
|
||||
if err=$(evalConfig "$@" 2>&1 >/dev/null); then
|
||||
echo 2>&1 "error: Expected error code, got exit code 0, while evaluating"
|
||||
reportFailure "$@"
|
||||
return 1
|
||||
else
|
||||
if echo "$err" | grep --silent "$errorContains" ; then
|
||||
pass=$((pass + 1))
|
||||
return 0;
|
||||
else
|
||||
echo 2>&1 "error: Expected error matching '$errorContains', while evaluating"
|
||||
reportFailure "$@"
|
||||
return 1
|
||||
fi
|
||||
fi
|
||||
}
|
||||
|
||||
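# For orientation (a sketch, not part of the original script): every check
# below boils down to evaluating a Nix expression of the form
#
#   import ./default.nix { modules = [ ./declare-enable.nix ./define-enable.nix ]; }
#
# for some list of module files, then grepping the printed attribute
# (e.g. config.enable) for the expected value or error message.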
# Check boolean option.
|
||||
checkConfigOutput "false" config.enable ./declare-enable.nix
|
||||
checkConfigError 'The option .* defined in .* does not exist.' config.enable ./define-enable.nix
|
||||
|
||||
# Check mkForce without submodules.
|
||||
set -- config.enable ./declare-enable.nix ./define-enable.nix
|
||||
checkConfigOutput "true" "$@"
|
||||
checkConfigOutput "false" "$@" ./define-force-enable.nix
|
||||
checkConfigOutput "false" "$@" ./define-enable-force.nix
|
||||
|
||||
# Check mkForce with option and submodules.
|
||||
checkConfigError 'attribute .*foo.* .* not found' config.loaOfSub.foo.enable ./declare-loaOfSub-any-enable.nix
|
||||
checkConfigOutput 'false' config.loaOfSub.foo.enable ./declare-loaOfSub-any-enable.nix ./define-loaOfSub-foo.nix
|
||||
set -- config.loaOfSub.foo.enable ./declare-loaOfSub-any-enable.nix ./define-loaOfSub-foo-enable.nix
|
||||
checkConfigOutput 'true' "$@"
|
||||
checkConfigOutput 'false' "$@" ./define-force-loaOfSub-foo-enable.nix
|
||||
checkConfigOutput 'false' "$@" ./define-loaOfSub-force-foo-enable.nix
|
||||
checkConfigOutput 'false' "$@" ./define-loaOfSub-foo-force-enable.nix
|
||||
checkConfigOutput 'false' "$@" ./define-loaOfSub-foo-enable-force.nix
|
||||
|
||||
# Check overriding effect of mkForce on submodule definitions.
|
||||
checkConfigError 'attribute .*bar.* .* not found' config.loaOfSub.bar.enable ./declare-loaOfSub-any-enable.nix ./define-loaOfSub-foo.nix
|
||||
checkConfigOutput 'false' config.loaOfSub.bar.enable ./declare-loaOfSub-any-enable.nix ./define-loaOfSub-foo.nix ./define-loaOfSub-bar.nix
|
||||
set -- config.loaOfSub.bar.enable ./declare-loaOfSub-any-enable.nix ./define-loaOfSub-foo.nix ./define-loaOfSub-bar-enable.nix
|
||||
checkConfigOutput 'true' "$@"
|
||||
checkConfigError 'attribute .*bar.* .* not found' "$@" ./define-force-loaOfSub-foo-enable.nix
|
||||
checkConfigError 'attribute .*bar.* .* not found' "$@" ./define-loaOfSub-force-foo-enable.nix
|
||||
checkConfigOutput 'true' "$@" ./define-loaOfSub-foo-force-enable.nix
|
||||
checkConfigOutput 'true' "$@" ./define-loaOfSub-foo-enable-force.nix
|
||||
|
||||
# Check mkIf with submodules.
|
||||
checkConfigError 'attribute .*foo.* .* not found' config.loaOfSub.foo.enable ./declare-enable.nix ./declare-loaOfSub-any-enable.nix
|
||||
set -- config.loaOfSub.foo.enable ./declare-enable.nix ./declare-loaOfSub-any-enable.nix
|
||||
checkConfigError 'attribute .*foo.* .* not found' "$@" ./define-if-loaOfSub-foo-enable.nix
|
||||
checkConfigError 'attribute .*foo.* .* not found' "$@" ./define-loaOfSub-if-foo-enable.nix
|
||||
checkConfigError 'attribute .*foo.* .* not found' "$@" ./define-loaOfSub-foo-if-enable.nix
|
||||
checkConfigOutput 'false' "$@" ./define-loaOfSub-foo-enable-if.nix
|
||||
checkConfigOutput 'true' "$@" ./define-enable.nix ./define-if-loaOfSub-foo-enable.nix
|
||||
checkConfigOutput 'true' "$@" ./define-enable.nix ./define-loaOfSub-if-foo-enable.nix
|
||||
checkConfigOutput 'true' "$@" ./define-enable.nix ./define-loaOfSub-foo-if-enable.nix
|
||||
checkConfigOutput 'true' "$@" ./define-enable.nix ./define-loaOfSub-foo-enable-if.nix
|
||||
|
||||
# Check _module.args.
|
||||
set -- config.enable ./declare-enable.nix ./define-enable-with-custom-arg.nix
|
||||
checkConfigError 'while evaluating the module argument .*custom.* in .*define-enable-with-custom-arg.nix.*:' "$@"
|
||||
checkConfigOutput "true" "$@" ./define-_module-args-custom.nix
|
||||
|
||||
# Check that using _module.args on imports causes infinite recursion, with
|
||||
# the proper error context.
|
||||
set -- "$@" ./define-_module-args-custom.nix ./import-custom-arg.nix
|
||||
checkConfigError 'while evaluating the module argument .*custom.* in .*import-custom-arg.nix.*:' "$@"
|
||||
checkConfigError 'infinite recursion encountered' "$@"
|
||||
|
||||
# Check _module.check.
|
||||
set -- config.enable ./declare-enable.nix ./define-enable.nix ./define-loaOfSub-foo.nix
|
||||
checkConfigError 'The option .* defined in .* does not exist.' "$@"
|
||||
checkConfigOutput "true" "$@" ./define-module-check.nix
|
||||
|
||||
cat <<EOF
|
||||
====== module tests ======
|
||||
$pass Pass
|
||||
$fail Fail
|
||||
EOF
|
||||
|
||||
if test $fail -ne 0; then
|
||||
exit 1
|
||||
fi
|
||||
exit 0
|
||||
@@ -1,14 +0,0 @@
|
||||
{ lib, ... }:
|
||||
|
||||
{
|
||||
options = {
|
||||
enable = lib.mkOption {
|
||||
default = false;
|
||||
example = true;
|
||||
type = lib.types.bool;
|
||||
description = ''
|
||||
Some descriptive text
|
||||
'';
|
||||
};
|
||||
};
|
||||
}
|
||||
@@ -1,29 +0,0 @@
|
||||
{ lib, ... }:
|
||||
|
||||
let
|
||||
submod = { ... }: {
|
||||
options = {
|
||||
enable = lib.mkOption {
|
||||
default = false;
|
||||
example = true;
|
||||
type = lib.types.bool;
|
||||
description = ''
|
||||
Some descriptive text
|
||||
'';
|
||||
};
|
||||
};
|
||||
};
|
||||
in
|
||||
|
||||
{
|
||||
options = {
|
||||
loaOfSub = lib.mkOption {
|
||||
default = {};
|
||||
example = {};
|
||||
type = lib.types.loaOf (lib.types.submodule [ submod ]);
|
||||
description = ''
|
||||
Some descriptive text
|
||||
'';
|
||||
};
|
||||
};
|
||||
}
|
||||
@@ -1,7 +0,0 @@
|
||||
{ lib ? import <nixpkgs/lib>, modules ? [] }:
|
||||
|
||||
{
|
||||
inherit (lib.evalModules {
|
||||
inherit modules;
|
||||
}) config options;
|
||||
}
|
||||
@@ -1,7 +0,0 @@
|
||||
{ lib, ... }:
|
||||
|
||||
{
|
||||
config = {
|
||||
_module.args.custom = true;
|
||||
};
|
||||
}
|
||||
@@ -1,5 +0,0 @@
|
||||
{ lib, ... }:
|
||||
|
||||
{
|
||||
enable = lib.mkForce false;
|
||||
}
|
||||
@@ -1,7 +0,0 @@
|
||||
{ lib, custom, ... }:
|
||||
|
||||
{
|
||||
config = {
|
||||
enable = custom;
|
||||
};
|
||||
}
|
||||
@@ -1,3 +0,0 @@
|
||||
{
|
||||
enable = true;
|
||||
}
|
||||
@@ -1,5 +0,0 @@
|
||||
{ lib, ... }:
|
||||
|
||||
lib.mkForce {
|
||||
enable = false;
|
||||
}
|
||||
@@ -1,5 +0,0 @@
|
||||
{ lib, ... }:
|
||||
|
||||
lib.mkForce {
|
||||
loaOfSub.foo.enable = false;
|
||||
}
|
||||
@@ -1,5 +0,0 @@
|
||||
{ config, lib, ... }:
|
||||
|
||||
lib.mkIf config.enable {
|
||||
loaOfSub.foo.enable = true;
|
||||
}
|
||||
@@ -1,3 +0,0 @@
|
||||
{
|
||||
loaOfSub.bar.enable = true;
|
||||
}
|
||||
@@ -1,3 +0,0 @@
|
||||
{
|
||||
loaOfSub.bar = {};
|
||||
}
|
||||
@@ -1,5 +0,0 @@
|
||||
{ lib, ... }:
|
||||
|
||||
{
|
||||
loaOfSub.foo.enable = lib.mkForce false;
|
||||
}
|
||||
@@ -1,5 +0,0 @@
|
||||
{ config, lib, ... }:
|
||||
|
||||
{
|
||||
loaOfSub.foo.enable = lib.mkIf config.enable true;
|
||||
}
|
||||
@@ -1,3 +0,0 @@
|
||||
{
|
||||
loaOfSub.foo.enable = true;
|
||||
}
|
||||
@@ -1,7 +0,0 @@
|
||||
{ lib, ... }:
|
||||
|
||||
{
|
||||
loaOfSub.foo = lib.mkForce {
|
||||
enable = false;
|
||||
};
|
||||
}
|
||||
@@ -1,7 +0,0 @@
|
||||
{ config, lib, ... }:
|
||||
|
||||
{
|
||||
loaOfSub.foo = lib.mkIf config.enable {
|
||||
enable = true;
|
||||
};
|
||||
}
|
||||
@@ -1,3 +0,0 @@
|
||||
{
|
||||
loaOfSub.foo = {};
|
||||
}
|
||||
@@ -1,7 +0,0 @@
|
||||
{ lib, ... }:
|
||||
|
||||
{
|
||||
loaOfSub = lib.mkForce {
|
||||
foo.enable = false;
|
||||
};
|
||||
}
|
||||
@@ -1,7 +0,0 @@
|
||||
{ config, lib, ... }:
|
||||
|
||||
{
|
||||
loaOfSub = lib.mkIf config.enable {
|
||||
foo.enable = true;
|
||||
};
|
||||
}
|
||||
@@ -1,3 +0,0 @@
|
||||
{
|
||||
_module.check = false;
|
||||
}
|
||||
@@ -1,6 +0,0 @@
|
||||
{ lib, custom, ... }:
|
||||
|
||||
{
|
||||
imports = []
|
||||
++ lib.optional custom ./define-enable-force.nix;
|
||||
}
|
||||
@@ -1,31 +0,0 @@
|
||||
{ nixpkgs }:
|
||||
|
||||
with import ../.. { };
|
||||
with lib;
|
||||
|
||||
stdenv.mkDerivation {
|
||||
name = "nixpkgs-lib-tests";
|
||||
buildInputs = [ nix ];
|
||||
NIX_PATH="nixpkgs=${nixpkgs}";
|
||||
|
||||
buildCommand = ''
|
||||
datadir="${nix}/share"
|
||||
export TEST_ROOT=$(pwd)/test-tmp
|
||||
export NIX_BUILD_HOOK=
|
||||
export NIX_CONF_DIR=$TEST_ROOT/etc
|
||||
export NIX_DB_DIR=$TEST_ROOT/db
|
||||
export NIX_LOCALSTATE_DIR=$TEST_ROOT/var
|
||||
export NIX_LOG_DIR=$TEST_ROOT/var/log/nix
|
||||
export NIX_MANIFESTS_DIR=$TEST_ROOT/var/nix/manifests
|
||||
export NIX_STATE_DIR=$TEST_ROOT/var/nix
|
||||
export NIX_STORE_DIR=$TEST_ROOT/store
|
||||
export PAGER=cat
|
||||
cacheDir=$TEST_ROOT/binary-cache
|
||||
nix-store --init
|
||||
|
||||
cd ${nixpkgs}/lib/tests
|
||||
./modules.sh
|
||||
|
||||
touch $out
|
||||
'';
|
||||
}
|
||||
lib/trivial.nix
@@ -1,3 +1,8 @@
|
||||
with {
|
||||
inherit (import ./lists.nix) deepSeqList;
|
||||
inherit (import ./attrsets.nix) deepSeqAttrs;
|
||||
};
|
||||
|
||||
rec {
|
||||
|
||||
# Identity function.
|
||||
@@ -12,130 +17,37 @@ rec {
|
||||
and = x: y: x && y;
|
||||
mergeAttrs = x: y: x // y;
|
||||
|
||||
# Compute the fixed point of the given function `f`, which is usually an
|
||||
# attribute set that expects its final, non-recursive representation as an
|
||||
# argument:
|
||||
#
|
||||
# f = self: { foo = "foo"; bar = "bar"; foobar = self.foo + self.bar; }
|
||||
#
|
||||
# Nix evaluates this recursion until all references to `self` have been
|
||||
# resolved. At that point, the final result is returned and `f x = x` holds:
|
||||
#
|
||||
# nix-repl> fix f
|
||||
# { bar = "bar"; foo = "foo"; foobar = "foobar"; }
|
||||
#
|
||||
# See https://en.wikipedia.org/wiki/Fixed-point_combinator for further
|
||||
# details.
|
||||
fix = f: let x = f x; in x;
|
||||
|
||||
# A variant of `fix` that records the original recursive attribute set in the
|
||||
# result. This is useful in combination with the `extends` function to
|
||||
# implement deep overriding. See pkgs/development/haskell-modules/default.nix
|
||||
# for a concrete example.
|
||||
fix' = f: let x = f x // { __unfix__ = f; }; in x;
|
||||
|
||||
# Modify the contents of an explicitly recursive attribute set in a way that
|
||||
# honors `self`-references. This is accomplished with a function
|
||||
#
|
||||
# g = self: super: { foo = super.foo + " + "; }
|
||||
#
|
||||
# that has access to the unmodified input (`super`) as well as the final
|
||||
# non-recursive representation of the attribute set (`self`). `extends`
|
||||
# differs from the native `//` operator insofar as that it's applied *before*
|
||||
# references to `self` are resolved:
|
||||
#
|
||||
# nix-repl> fix (extends g f)
|
||||
# { bar = "bar"; foo = "foo + "; foobar = "foo + bar"; }
|
||||
#
|
||||
# The name of the function is inspired by object-oriented inheritance, i.e.
|
||||
# think of it as an infix operator `g extends f` that mimics the syntax from
|
||||
# Java. It may seem counter-intuitive to have the "base class" as the second
|
||||
# argument, but it's nice this way if several uses of `extends` are cascaded.
|
||||
extends = f: rattrs: self: let super = rattrs self; in super // f self super;
|
||||
|
||||
# Create an overridable, recursive attribute set. For example:
|
||||
#
|
||||
# nix-repl> obj = makeExtensible (self: { })
|
||||
#
|
||||
# nix-repl> obj
|
||||
# { __unfix__ = «lambda»; extend = «lambda»; }
|
||||
#
|
||||
# nix-repl> obj = obj.extend (self: super: { foo = "foo"; })
|
||||
#
|
||||
# nix-repl> obj
|
||||
# { __unfix__ = «lambda»; extend = «lambda»; foo = "foo"; }
|
||||
#
|
||||
# nix-repl> obj = obj.extend (self: super: { foo = super.foo + " + "; bar = "bar"; foobar = self.foo + self.bar; })
|
||||
#
|
||||
# nix-repl> obj
|
||||
# { __unfix__ = «lambda»; bar = "bar"; extend = «lambda»; foo = "foo + "; foobar = "foo + bar"; }
|
||||
makeExtensible = makeExtensibleWithCustomName "extend";
|
||||
|
||||
# Same as `makeExtensible` but the name of the extending attribute is
|
||||
# customized.
|
||||
makeExtensibleWithCustomName = extenderName: rattrs:
|
||||
fix' rattrs // {
|
||||
${extenderName} = f: makeExtensibleWithCustomName extenderName (extends f rattrs);
|
||||
};
|
||||
# Take a function and evaluate it with its own returned value.
|
||||
fix = f: let result = f result; in result;
|
||||
|
||||
# Flip the order of the arguments of a binary function.
|
||||
flip = f: a: b: f b a;
|
||||
|
||||
# `seq x y' evaluates x, then returns y. That is, it forces strict
|
||||
# evaluation of its first argument.
|
||||
seq = x: y: if x == null then y else y;
|
||||
|
||||
# Like `seq', but recurses into lists and attribute sets to force evaluation
|
||||
# of all list elements/attributes.
|
||||
deepSeq = x: y:
|
||||
if builtins.isList x
|
||||
then deepSeqList x y
|
||||
else if builtins.isAttrs x
|
||||
then deepSeqAttrs x y
|
||||
else seq x y;
|
||||
|
||||
# Pull in some builtins not included elsewhere.
|
||||
inherit (builtins)
|
||||
pathExists readFile isBool isFunction
|
||||
isInt add sub lessThan
|
||||
seq deepSeq genericClosure;
|
||||
|
||||
inherit (import ./strings.nix) fileContents;
|
||||
isInt add sub lessThan;
|
||||
|
||||
# Return the Nixpkgs version number.
|
||||
nixpkgsVersion =
|
||||
let suffixFile = ../.version-suffix; in
|
||||
fileContents ../.version
|
||||
+ (if pathExists suffixFile then fileContents suffixFile else "pre-git");
|
||||
readFile ../.version
|
||||
+ (if pathExists suffixFile then readFile suffixFile else "pre-git");
|
||||
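# Rough illustration (the exact value depends on the checkout; treat it as an
# assumption): with no .version-suffix file present, lib.nixpkgsVersion
# evaluates to something like "17.03pre-git".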
|
||||
# Whether we're being called by nix-shell.
|
||||
inNixShell = builtins.getEnv "IN_NIX_SHELL" != "";
|
||||
# Whether we're being called by nix-shell. This is useful to
|
||||
inNixShell = builtins.getEnv "IN_NIX_SHELL" == "1";
|
||||
|
||||
# Return minimum/maximum of two numbers.
|
||||
min = x: y: if x < y then x else y;
|
||||
max = x: y: if x > y then x else y;
|
||||
|
||||
/* Reads a JSON file. It is useful to import pure data into other nix
|
||||
expressions.
|
||||
|
||||
Example:
|
||||
|
||||
mkDerivation {
|
||||
src = fetchgit (importJSON ./repo.json)
|
||||
#...
|
||||
}
|
||||
|
||||
where repo.json contains:
|
||||
|
||||
{
|
||||
"url": "git://some-domain/some/repo",
|
||||
"rev": "265de7283488964f44f0257a8b4a055ad8af984d",
|
||||
"sha256": "0sb3h3067pzf3a7mlxn1hikpcjrsvycjcnj9hl9b1c3ykcgvps7h"
|
||||
}
|
||||
|
||||
*/
|
||||
importJSON = path:
|
||||
builtins.fromJSON (builtins.readFile path);
|
||||
|
||||
/* See https://github.com/NixOS/nix/issues/749. Eventually we'd like these
|
||||
to expand to Nix builtins that carry metadata so that Nix can filter out
|
||||
the INFO messages without parsing the message string.
|
||||
|
||||
Usage:
|
||||
{
|
||||
foo = lib.warn "foo is deprecated" oldFoo;
|
||||
}
|
||||
|
||||
TODO: figure out a clever way to integrate location information from
|
||||
something like __unsafeGetAttrPos.
|
||||
*/
|
||||
warn = msg: builtins.trace "WARNING: ${msg}";
|
||||
info = msg: builtins.trace "INFO: ${msg}";
|
||||
}
|
||||
|
||||
lib/types.nix
@@ -6,7 +6,6 @@ with import ./attrsets.nix;
|
||||
with import ./options.nix;
|
||||
with import ./trivial.nix;
|
||||
with import ./strings.nix;
|
||||
with {inherit (import ./modules.nix) mergeDefinitions filterOverrides; };
|
||||
|
||||
rec {
|
||||
|
||||
@@ -17,43 +16,10 @@ rec {
|
||||
};
|
||||
|
||||
|
||||
# Default type merging function
|
||||
# takes two type functors and return the merged type
|
||||
defaultTypeMerge = f: f':
|
||||
let wrapped = f.wrapped.typeMerge f'.wrapped.functor;
|
||||
payload = f.binOp f.payload f'.payload;
|
||||
in
|
||||
# cannot merge different types
|
||||
if f.name != f'.name
|
||||
then null
|
||||
# simple types
|
||||
else if (f.wrapped == null && f'.wrapped == null)
|
||||
&& (f.payload == null && f'.payload == null)
|
||||
then f.type
|
||||
# composed types
|
||||
else if (f.wrapped != null && f'.wrapped != null) && (wrapped != null)
|
||||
then f.type wrapped
|
||||
# value types
|
||||
else if (f.payload != null && f'.payload != null) && (payload != null)
|
||||
then f.type payload
|
||||
else null;
|
||||
|
||||
# Default type functor
|
||||
defaultFunctor = name: {
|
||||
inherit name;
|
||||
type = types."${name}" or null;
|
||||
wrapped = null;
|
||||
payload = null;
|
||||
binOp = a: b: null;
|
||||
};
|
||||
|
||||
isOptionType = isType "option-type";
|
||||
mkOptionType =
|
||||
{ # Human-readable representation of the type, should be equivalent to
|
||||
# the type function name.
|
||||
{ # Human-readable representation of the type.
|
||||
name
|
||||
, # Description of the type, defined recursively by embedding the wrapped type, if any.
|
||||
description ? null
|
||||
, # Function applied to each definition that should return true if
|
||||
# it's type-correct, false otherwise.
|
||||
check ? (x: true)
|
||||
@@ -69,26 +35,12 @@ rec {
|
||||
getSubOptions ? prefix: {}
|
||||
, # List of modules if any, or null if none.
|
||||
getSubModules ? null
|
||||
, # Function for building the same option type with a different list of
|
||||
, # Function for building the same option type with a different list of
|
||||
# modules.
|
||||
substSubModules ? m: null
|
||||
, # Function that merge type declarations.
|
||||
# internal, takes a functor as argument and returns the merged type.
|
||||
# returning null means the type is not mergeable
|
||||
typeMerge ? defaultTypeMerge functor
|
||||
, # The type functor.
|
||||
# internal, representation of the type as an attribute set.
|
||||
# name: name of the type
|
||||
# type: type function.
|
||||
# wrapped: the type wrapped in case of compound types.
|
||||
# payload: values of the type, two payloads of the same type must be
|
||||
# combinable with the binOp binary operation.
|
||||
# binOp: binary operation that merge two payloads of the same type.
|
||||
functor ? defaultFunctor name
|
||||
}:
|
||||
{ _type = "option-type";
|
||||
inherit name check merge getSubOptions getSubModules substSubModules typeMerge functor;
|
||||
description = if description == null then name else description;
|
||||
inherit name check merge getSubOptions getSubModules substSubModules;
|
||||
};
|
||||
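# A hedged sketch of rolling a custom type with the arguments documented
# above (the "port" name and its range check are invented for illustration):
#
#   port = mkOptionType {
#     name = "port";
#     description = "TCP/UDP port number";
#     check = x: isInt x && x >= 0 && x <= 65535;
#     merge = mergeEqualOption;
#   };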
|
||||
|
||||
@@ -99,39 +51,29 @@ rec {
|
||||
};
|
||||
|
||||
bool = mkOptionType {
|
||||
name = "bool";
|
||||
description = "boolean";
|
||||
name = "boolean";
|
||||
check = isBool;
|
||||
merge = mergeEqualOption;
|
||||
merge = loc: fold (x: y: x.value || y) false;
|
||||
};
|
||||
|
||||
int = mkOptionType rec {
|
||||
name = "int";
|
||||
description = "integer";
|
||||
int = mkOptionType {
|
||||
name = "integer";
|
||||
check = isInt;
|
||||
merge = mergeOneOption;
|
||||
};
|
||||
|
||||
str = mkOptionType {
|
||||
name = "str";
|
||||
description = "string";
|
||||
name = "string";
|
||||
check = isString;
|
||||
merge = mergeOneOption;
|
||||
};
|
||||
|
||||
# Merge multiple definitions by concatenating them (with the given
|
||||
# separator between the values).
|
||||
separatedString = sep: mkOptionType rec {
|
||||
name = "separatedString";
|
||||
description = "string";
|
||||
separatedString = sep: mkOptionType {
|
||||
name = "string";
|
||||
check = isString;
|
||||
merge = loc: defs: concatStringsSep sep (getValues defs);
|
||||
functor = (defaultFunctor name) // {
|
||||
payload = sep;
|
||||
binOp = sepLhs: sepRhs:
|
||||
if sepLhs == sepRhs then sepLhs
|
||||
else null;
|
||||
};
|
||||
};
|
||||
|
||||
lines = separatedString "\n";
|
||||
@@ -143,72 +85,51 @@ rec {
|
||||
string = separatedString "";
|
||||
|
||||
attrs = mkOptionType {
|
||||
name = "attrs";
|
||||
description = "attribute set";
|
||||
name = "attribute set";
|
||||
check = isAttrs;
|
||||
merge = loc: foldl' (res: def: mergeAttrs res def.value) {};
|
||||
merge = loc: fold (def: mergeAttrs def.value) {};
|
||||
};
|
||||
|
||||
# derivation is a reserved keyword.
|
||||
package = mkOptionType {
|
||||
name = "package";
|
||||
check = x: isDerivation x || isStorePath x;
|
||||
merge = loc: defs:
|
||||
let res = mergeOneOption loc defs;
|
||||
in if isDerivation res then res else toDerivation res;
|
||||
};
|
||||
|
||||
shellPackage = package // {
|
||||
check = x: (package.check x) && (hasAttr "shellPath" x);
|
||||
name = "derivation";
|
||||
check = isDerivation;
|
||||
merge = mergeOneOption;
|
||||
};
|
||||
|
||||
path = mkOptionType {
|
||||
name = "path";
|
||||
# Hacky: there is no ‘isPath’ primop.
|
||||
check = x: builtins.substring 0 1 (toString x) == "/";
|
||||
check = x: builtins.unsafeDiscardStringContext (builtins.substring 0 1 (toString x)) == "/";
|
||||
merge = mergeOneOption;
|
||||
};
|
||||
|
||||
# drop this in the future:
|
||||
list = builtins.trace "`types.list' is deprecated; use `types.listOf' instead" types.listOf;
|
||||
|
||||
listOf = elemType: mkOptionType rec {
|
||||
name = "listOf";
|
||||
description = "list of ${elemType.description}s";
|
||||
check = isList;
|
||||
listOf = elemType: mkOptionType {
|
||||
name = "list of ${elemType.name}s";
|
||||
check = value: isList value && all elemType.check value;
|
||||
merge = loc: defs:
|
||||
map (x: x.value) (filter (x: x ? value) (concatLists (imap (n: def:
|
||||
if isList def.value then
|
||||
imap (m: def':
|
||||
(mergeDefinitions
|
||||
(loc ++ ["[definition ${toString n}-entry ${toString m}]"])
|
||||
elemType
|
||||
[{ inherit (def) file; value = def'; }]
|
||||
).optionalValue
|
||||
) def.value
|
||||
else
|
||||
throw "The option value `${showOption loc}' in `${def.file}' is not a list.") defs)));
|
||||
concatLists (imap (n: def: imap (m: def':
|
||||
elemType.merge (loc ++ ["[${toString n}-${toString m}]"])
|
||||
[{ inherit (def) file; value = def'; }]) def.value) defs);
|
||||
getSubOptions = prefix: elemType.getSubOptions (prefix ++ ["*"]);
|
||||
getSubModules = elemType.getSubModules;
|
||||
substSubModules = m: listOf (elemType.substSubModules m);
|
||||
functor = (defaultFunctor name) // { wrapped = elemType; };
|
||||
};
|
||||
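# Intuition sketch (the option and package names are assumed, not taken from
# this diff): listOf is what lets list options compose across modules, so two
# modules that both define
#
#   environment.systemPackages = [ pkgs.git ];
#   environment.systemPackages = [ pkgs.vim ];
#
# end up with both packages; each element is type-checked and merged with the
# element type at a per-definition location such as
# environment.systemPackages.[definition 1-entry 1].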
|
||||
attrsOf = elemType: mkOptionType rec {
|
||||
name = "attrsOf";
|
||||
description = "attribute set of ${elemType.description}s";
|
||||
check = isAttrs;
|
||||
attrsOf = elemType: mkOptionType {
|
||||
name = "attribute set of ${elemType.name}s";
|
||||
check = x: isAttrs x && all elemType.check (attrValues x);
|
||||
merge = loc: defs:
|
||||
mapAttrs (n: v: v.value) (filterAttrs (n: v: v ? value) (zipAttrsWith (name: defs:
|
||||
(mergeDefinitions (loc ++ [name]) elemType defs).optionalValue
|
||||
)
|
||||
zipAttrsWith (name: elemType.merge (loc ++ [name]))
|
||||
# Push down position info.
|
||||
(map (def: listToAttrs (mapAttrsToList (n: def':
|
||||
{ name = n; value = { inherit (def) file; value = def'; }; }) def.value)) defs)));
|
||||
{ name = n; value = { inherit (def) file; value = def'; }; }) def.value)) defs);
|
||||
getSubOptions = prefix: elemType.getSubOptions (prefix ++ ["<name>"]);
|
||||
getSubModules = elemType.getSubModules;
|
||||
substSubModules = m: attrsOf (elemType.substSubModules m);
|
||||
functor = (defaultFunctor name) // { wrapped = elemType; };
|
||||
};
|
||||
|
||||
# List or attribute set of ...
|
||||
@@ -227,52 +148,31 @@ rec {
|
||||
def;
|
||||
listOnly = listOf elemType;
|
||||
attrOnly = attrsOf elemType;
|
||||
in mkOptionType rec {
|
||||
name = "loaOf";
|
||||
description = "list or attribute set of ${elemType.description}s";
|
||||
check = x: isList x || isAttrs x;
|
||||
in mkOptionType {
|
||||
name = "list or attribute set of ${elemType.name}s";
|
||||
check = x:
|
||||
if isList x then listOnly.check x
|
||||
else if isAttrs x then attrOnly.check x
|
||||
else false;
|
||||
merge = loc: defs: attrOnly.merge loc (imap convertIfList defs);
|
||||
getSubOptions = prefix: elemType.getSubOptions (prefix ++ ["<name?>"]);
|
||||
getSubModules = elemType.getSubModules;
|
||||
substSubModules = m: loaOf (elemType.substSubModules m);
|
||||
functor = (defaultFunctor name) // { wrapped = elemType; };
|
||||
};
|
||||
|
||||
# List or element of ...
|
||||
loeOf = elemType: mkOptionType rec {
|
||||
name = "loeOf";
|
||||
description = "element or list of ${elemType.description}s";
|
||||
check = x: isList x || elemType.check x;
|
||||
merge = loc: defs:
|
||||
let
|
||||
defs' = filterOverrides defs;
|
||||
res = (head defs').value;
|
||||
in
|
||||
if isList res then concatLists (getValues defs')
|
||||
else if lessThan 1 (length defs') then
|
||||
throw "The option `${showOption loc}' is defined multiple times, in ${showFiles (getFiles defs)}."
|
||||
else if !isString res then
|
||||
throw "The option `${showOption loc}' does not have a string value, in ${showFiles (getFiles defs)}."
|
||||
else res;
|
||||
functor = (defaultFunctor name) // { wrapped = elemType; };
|
||||
};
|
||||
|
||||
uniq = elemType: mkOptionType rec {
|
||||
name = "uniq";
|
||||
inherit (elemType) description check;
|
||||
uniq = elemType: mkOptionType {
|
||||
inherit (elemType) name check;
|
||||
merge = mergeOneOption;
|
||||
getSubOptions = elemType.getSubOptions;
|
||||
getSubModules = elemType.getSubModules;
|
||||
substSubModules = m: uniq (elemType.substSubModules m);
|
||||
functor = (defaultFunctor name) // { wrapped = elemType; };
|
||||
};
|
||||
|
||||
nullOr = elemType: mkOptionType rec {
|
||||
name = "nullOr";
|
||||
description = "null or ${elemType.description}";
|
||||
check = x: x == null || elemType.check x;
|
||||
nullOr = elemType: mkOptionType {
|
||||
name = "null or ${elemType.name}";
|
||||
check = x: builtins.isNull x || elemType.check x;
|
||||
merge = loc: defs:
|
||||
let nrNulls = count (def: def.value == null) defs; in
|
||||
let nrNulls = count (def: isNull def.value) defs; in
|
||||
if nrNulls == length defs then null
|
||||
else if nrNulls != 0 then
|
||||
throw "The option `${showOption loc}' is defined both null and not null, in ${showFiles (getFiles defs)}."
|
||||
@@ -280,7 +180,6 @@ rec {
|
||||
getSubOptions = elemType.getSubOptions;
|
||||
getSubModules = elemType.getSubModules;
|
||||
substSubModules = m: nullOr (elemType.substSubModules m);
|
||||
functor = (defaultFunctor name) // { wrapped = elemType; };
|
||||
};
|
||||
|
||||
submodule = opts:
|
||||
@@ -295,69 +194,32 @@ rec {
|
||||
let
|
||||
coerce = def: if isFunction def then def else { config = def; };
|
||||
modules = opts' ++ map (def: { _file = def.file; imports = [(coerce def.value)]; }) defs;
|
||||
in (evalModules {
|
||||
inherit modules;
|
||||
args.name = last loc;
|
||||
prefix = loc;
|
||||
}).config;
|
||||
in (evalModules { inherit modules; args.name = last loc; prefix = loc; }).config;
|
||||
getSubOptions = prefix: (evalModules
|
||||
{ modules = opts'; inherit prefix;
|
||||
# FIXME: hack to get shit to evaluate.
|
||||
args = { name = ""; }; }).options;
|
||||
getSubModules = opts';
|
||||
substSubModules = m: submodule m;
|
||||
functor = (defaultFunctor name) // {
|
||||
# Merging of submodules is done as part of mergeOptionDecls, as we have to annotate
|
||||
# each submodule with its location.
|
||||
payload = [];
|
||||
binOp = lhs: rhs: [];
|
||||
};
|
||||
};
|
||||
|
||||
enum = values:
|
||||
let
|
||||
show = v:
|
||||
if builtins.isString v then ''"${v}"''
|
||||
else if builtins.isInt v then builtins.toString v
|
||||
else ''<${builtins.typeOf v}>'';
|
||||
in
|
||||
mkOptionType rec {
|
||||
name = "enum";
|
||||
description = "one of ${concatMapStringsSep ", " show values}";
|
||||
check = flip elem values;
|
||||
merge = mergeOneOption;
|
||||
functor = (defaultFunctor name) // { payload = values; binOp = a: b: unique (a ++ b); };
|
||||
};
|
||||
enum = values: mkOptionType {
|
||||
name = "one of ${concatStringsSep ", " values}";
|
||||
check = flip elem values;
|
||||
merge = mergeOneOption;
|
||||
};
|
||||
|
||||
either = t1: t2: mkOptionType rec {
|
||||
name = "either";
|
||||
description = "${t1.description} or ${t2.description}";
|
||||
either = t1: t2: mkOptionType {
|
||||
name = "${t1.name} or ${t2.name}";
|
||||
check = x: t1.check x || t2.check x;
|
||||
merge = loc: defs:
|
||||
let
|
||||
defList = map (d: d.value) defs;
|
||||
in
|
||||
if all (x: t1.check x) defList
|
||||
then t1.merge loc defs
|
||||
else if all (x: t2.check x) defList
|
||||
then t2.merge loc defs
|
||||
else mergeOneOption loc defs;
|
||||
typeMerge = f':
|
||||
let mt1 = t1.typeMerge (elemAt f'.wrapped 0).functor;
|
||||
mt2 = t2.typeMerge (elemAt f'.wrapped 1).functor;
|
||||
in
|
||||
if (name == f'.name) && (mt1 != null) && (mt2 != null)
|
||||
then functor.type mt1 mt2
|
||||
else null;
|
||||
functor = (defaultFunctor name) // { wrapped = [ t1 t2 ]; };
|
||||
merge = mergeOneOption;
|
||||
};
|
||||
|
||||
# Obsolete alternative to configOf. It takes its option
|
||||
# declarations from the ‘options’ attribute of containing option
|
||||
# declaration.
|
||||
optionSet = mkOptionType {
|
||||
name = builtins.trace "types.optionSet is deprecated; use types.submodule instead" "optionSet";
|
||||
description = "option set";
|
||||
name = /* builtins.trace "types.optionSet is deprecated; use types.submodule instead" */ "option set";
|
||||
};
|
||||
|
||||
# Augment the given type with an additional type check function.
|
||||
|
||||
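For context, a minimal sketch (not part of the diff above) of how these option types are consumed when declaring NixOS module options; the option names, defaults, and descriptions here are invented for illustration:

{ lib, ... }:

with lib;

{
  options.example = {
    greeters = mkOption {
      type = types.listOf types.string;              # merged by concatenating all list definitions
      default = [ ];
      description = "Names to greet";
    };
    settings = mkOption {
      type = types.attrsOf (types.nullOr types.int); # merged attribute by attribute
      default = { };
    };
    logLevel = mkOption {
      type = types.enum [ "debug" "info" "warn" ];   # mergeOneOption: exactly one definition wins
      default = "info";
    };
  };
}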
@@ -1,18 +0,0 @@
/* Helper expression for copy-tarballs. This returns (nearly) all
tarballs used by the free packages in Nixpkgs.

Typical usage:

$ copy-tarballs.pl --expr 'import <nixpkgs/maintainers/scripts/all-tarballs.nix>'
*/

removeAttrs (import ../../pkgs/top-level/release.nix
{ # Don't apply ‘hydraJob’ to jobs, because then we can't get to the
# dependency graph.
scrubJobs = false;
# No need to evaluate on i686.
supportedSystems = [ "x86_64-linux" ];
})
[ # Remove jobs whose evaluation depends on a writable Nix store.
"tarball" "unstable"
]
@@ -1,219 +1,97 @@
#! /usr/bin/env nix-shell
#! nix-shell -i perl -p perl perlPackages.NetAmazonS3 perlPackages.FileSlurp nixUnstable

# This command uploads tarballs to tarballs.nixos.org, the
# content-addressed cache used by fetchurl as a fallback for when
# upstream tarballs disappear or change. Usage:
#
# 1) To upload one or more files:
#
# $ copy-tarballs.pl --file /path/to/tarball.tar.gz
#
# 2) To upload all files obtained via calls to fetchurl in a Nix derivation:
#
# $ copy-tarballs.pl --expr '(import <nixpkgs> {}).hello'
#! /run/current-system/sw/bin/perl -w

use strict;
use warnings;
use XML::Simple;
use File::Basename;
use File::Path;
use File::Slurp;
use JSON;
use Net::Amazon::S3;
use File::Copy 'cp';
use IPC::Open2;
use Nix::Store;

isValidPath("/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-foo"); # FIXME: forces Nix::Store initialisation
my $myDir = dirname($0);

sub usage {
die "Syntax: $0 [--dry-run] [--exclude REGEXP] [--expr EXPR | --file FILES...]\n";
}
my $tarballsCache = $ENV{'NIX_TARBALLS_CACHE'} // "/tarballs";

my $dryRun = 0;
my $expr;
my @fileNames;
my $exclude;
my $xml = `nix-instantiate --eval-only --xml --strict '<nixpkgs/maintainers/scripts/find-tarballs.nix>'`;
die "$0: evaluation failed\n" if $? != 0;

while (@ARGV) {
my $flag = shift @ARGV;
my $data = XMLin($xml) or die;

if ($flag eq "--expr") {
$expr = shift @ARGV or die "--expr requires an argument";
} elsif ($flag eq "--file") {
@fileNames = @ARGV;
last;
} elsif ($flag eq "--dry-run") {
$dryRun = 1;
} elsif ($flag eq "--exclude") {
$exclude = shift @ARGV or die "--exclude requires an argument";
} else {
usage();
}
}
mkpath($tarballsCache);
mkpath("$tarballsCache/md5");
mkpath("$tarballsCache/sha1");
mkpath("$tarballsCache/sha256");

foreach my $file (@{$data->{list}->{attrs}}) {
my $url = $file->{attr}->{url}->{string}->{value};
my $algo = $file->{attr}->{type}->{string}->{value};
my $hash = $file->{attr}->{hash}->{string}->{value};

# S3 setup.
my $aws_access_key_id = $ENV{'AWS_ACCESS_KEY_ID'} or die "AWS_ACCESS_KEY_ID not set\n";
my $aws_secret_access_key = $ENV{'AWS_SECRET_ACCESS_KEY'} or die "AWS_SECRET_ACCESS_KEY not set\n";

my $s3 = Net::Amazon::S3->new(
{ aws_access_key_id => $aws_access_key_id,
aws_secret_access_key => $aws_secret_access_key,
retry => 1,
});

my $bucket = $s3->bucket("nixpkgs-tarballs") or die;

my $doWrite = 0;
my $cacheFile = ($ENV{"HOME"} or die "\$HOME is not set") . "/.cache/nix/copy-tarballs";
my %cache;
$cache{$_} = 1 foreach read_file($cacheFile, err_mode => 'quiet', chomp => 1);
$doWrite = 1;

END() {
File::Path::mkpath(dirname($cacheFile), 0, 0755);
write_file($cacheFile, map { "$_\n" } keys %cache) if $doWrite;
}

sub alreadyMirrored {
my ($algo, $hash) = @_;
my $key = "$algo/$hash";
return 1 if defined $cache{$key};
my $res = defined $bucket->get_key($key);
$cache{$key} = 1 if $res;
return $res;
}

sub uploadFile {
my ($fn, $name) = @_;

my $md5_16 = hashFile("md5", 0, $fn) or die;
my $sha1_16 = hashFile("sha1", 0, $fn) or die;
my $sha256_32 = hashFile("sha256", 1, $fn) or die;
my $sha256_16 = hashFile("sha256", 0, $fn) or die;
my $sha512_32 = hashFile("sha512", 1, $fn) or die;
my $sha512_16 = hashFile("sha512", 0, $fn) or die;

my $mainKey = "sha512/$sha512_16";

# Create redirects from the other hash types.
sub redirect {
my ($name, $dest) = @_;
#print STDERR "linking $name to $dest...\n";
$bucket->add_key($name, "", { 'x-amz-website-redirect-location' => "/" . $dest })
or die "failed to create redirect from $name to $dest\n";
$cache{$name} = 1;
}
redirect "md5/$md5_16", $mainKey;
redirect "sha1/$sha1_16", $mainKey;
redirect "sha256/$sha256_32", $mainKey;
redirect "sha256/$sha256_16", $mainKey;
redirect "sha512/$sha512_32", $mainKey;

# Upload the file as sha512/<hash-in-base-16>.
print STDERR "uploading $fn to $mainKey...\n";
$bucket->add_key_filename($mainKey, $fn, { 'x-amz-meta-original-name' => $name })
or die "failed to upload $fn to $mainKey\n";
$cache{$mainKey} = 1;
}

if (scalar @fileNames) {
my $res = 0;
foreach my $fn (@fileNames) {
eval {
if (alreadyMirrored("sha512", hashFile("sha512", 0, $fn))) {
print STDERR "$fn is already mirrored\n";
} else {
uploadFile($fn, basename $fn);
}
};
if ($@) {
warn "$@";
$res = 1;
}
}
exit $res;
}

elsif (defined $expr) {

# Evaluate find-tarballs.nix.
my $pid = open(JSON, "-|", "nix-instantiate", "--eval", "--json", "--strict",
"<nixpkgs/maintainers/scripts/find-tarballs.nix>",
"--arg", "expr", $expr);
my $stdout = <JSON>;
waitpid($pid, 0);
die "$0: evaluation failed\n" if $?;
close JSON;

my $fetches = decode_json($stdout);

print STDERR "evaluation returned ", scalar(@{$fetches}), " tarballs\n";

# Check every fetchurl call discovered by find-tarballs.nix.
my $mirrored = 0;
my $have = 0;
foreach my $fetch (sort { $a->{url} cmp $b->{url} } @{$fetches}) {
my $url = $fetch->{url};
my $algo = $fetch->{type};
my $hash = $fetch->{hash};
my $name = $fetch->{name};

if (defined $ENV{DEBUG}) {
print "$url $algo $hash\n";
next;
}

if ($url !~ /^http:/ && $url !~ /^https:/ && $url !~ /^ftp:/ && $url !~ /^mirror:/) {
print STDERR "skipping $url (unsupported scheme)\n";
next;
}

next if defined $exclude && $url =~ /$exclude/;

if (alreadyMirrored($algo, $hash)) {
$have++;
next;
}

my $storePath = makeFixedOutputPath(0, $algo, $hash, $name);

print STDERR "mirroring $url ($storePath)...\n";

if ($dryRun) {
$mirrored++;
next;
}

# Substitute the output.
if (!isValidPath($storePath)) {
system("nix-store", "-r", $storePath);
}

# Otherwise download the file using nix-prefetch-url.
if (!isValidPath($storePath)) {
$ENV{QUIET} = 1;
$ENV{PRINT_PATH} = 1;
my $fh;
my $pid = open($fh, "-|", "nix-prefetch-url", "--type", $algo, $url, $hash) or die;
waitpid($pid, 0) or die;
if ($? != 0) {
print STDERR "failed to fetch $url: $?\n";
next;
}
<$fh>; my $storePath2 = <$fh>; chomp $storePath2;
if ($storePath ne $storePath2) {
warn "strange: $storePath != $storePath2\n";
next;
}
}

uploadFile($storePath, $url);
$mirrored++;
if ($url !~ /^http:/ && $url !~ /^https:/ && $url !~ /^ftp:/ && $url !~ /^mirror:/) {
print STDERR "skipping $url (unsupported scheme)\n";
next;
}

print STDERR "mirrored $mirrored files, already have $have files\n";
}
$url =~ /([^\/]+)$/;
my $fn = $1;

else {
usage();
if (!defined $fn) {
print STDERR "skipping $url (no file name)\n";
next;
}

if ($fn =~ /[&?=%]/ || $fn =~ /^\./) {
print STDERR "skipping $url (bad character in file name)\n";
next;
}

if ($fn !~ /[a-zA-Z]/) {
print STDERR "skipping $url (no letter in file name)\n";
next;
}

if ($fn !~ /[0-9]/) {
print STDERR "skipping $url (no digit in file name)\n";
next;
}

if ($fn !~ /[-_\.]/) {
print STDERR "skipping $url (no dash/dot/underscore in file name)\n";
next;
}

my $dstPath = "$tarballsCache/$fn";

next if -e $dstPath;

print "downloading $url to $dstPath...\n";

next if $ENV{DRY_RUN};

$ENV{QUIET} = 1;
$ENV{PRINT_PATH} = 1;
my $fh;
my $pid = open($fh, "-|", "nix-prefetch-url", "--type", $algo, $url, $hash) or die;
waitpid($pid, 0) or die;
if ($? != 0) {
print STDERR "failed to fetch $url: $?\n";
next;
}
<$fh>; my $storePath = <$fh>; chomp $storePath;

die unless -e $storePath;

cp($storePath, $dstPath) or die;

my $md5 = hashFile("md5", 0, $storePath) or die;
symlink("../$fn", "$tarballsCache/md5/$md5");

my $sha1 = hashFile("sha1", 0, $storePath) or die;
symlink("../$fn", "$tarballsCache/sha1/$sha1");

my $sha256 = hashFile("sha256", 0, $storePath) or die;
symlink("../$fn", "$tarballsCache/sha256/$sha256");

$sha256 = hashFile("sha256", 1, $storePath) or die;
symlink("../$fn", "$tarballsCache/sha256/$sha256");
}
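The script keys each upload on the fixed output hash of the corresponding fetchurl call. A minimal sketch (not part of the diff) of such a fixed-output call; the hash below is a placeholder:

{ fetchurl }:

fetchurl {
  # The sha256 (placeholder here) is the fixed output hash; roughly speaking,
  # the hash algorithm plus this hash is what identifies the tarball on
  # tarballs.nixos.org when the upstream URL disappears.
  url = "mirror://gnu/hello/hello-2.10.tar.gz";
  sha256 = "0000000000000000000000000000000000000000000000000000";
}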
@@ -17,7 +17,7 @@ trap "exitHandler" EXIT
# fetch the trace and the drvPath of the attribute.
nix-instantiate $NIXPKGS -A $attr --show-trace > "$tmp/drvPath" 2> "$tmp/trace" || {
cat 1>&2 - "$tmp/trace" <<EOF
An error occurred while evaluating $attr.
An error occured while evaluating $attr.
EOF
exit 1
}
@@ -1,60 +0,0 @@
#! /usr/bin/env nix-shell
#! nix-shell -i bash -p coreutils findutils gnused nix wget

set -efuo pipefail

SRCS=
if [ -d "$1" ]; then
SRCS="$(pwd)/$1/srcs.nix"
. "$1/fetch.sh"
else
SRCS="$(pwd)/$(dirname $1)/srcs.nix"
. "$1"
fi

tmp=$(mktemp -d)
pushd $tmp >/dev/null
wget -nH -r -c --no-parent "${WGET_ARGS[@]}" >/dev/null

csv=$(mktemp)
find . -type f | while read src; do
# Sanitize file name
filename=$(basename "$src" | tr '@' '_')
nameVersion="${filename%.tar.*}"
name=$(echo "$nameVersion" | sed -e 's,-[[:digit:]].*,,' | sed -e 's,-opensource-src$,,')
version=$(echo "$nameVersion" | sed -e 's,^\([[:alpha:]][[:alnum:]]*-\)\+,,')
echo "$name,$version,$src,$filename" >>$csv
done

cat >"$SRCS" <<EOF
# DO NOT EDIT! This file is generated automatically by fetch-kde-qt.sh
{ fetchurl, mirror }:

{
EOF

gawk -F , "{ print \$1 }" $csv | sort | uniq | while read name; do
versions=$(gawk -F , "/^$name,/ { print \$2 }" $csv)
latestVersion=$(echo "$versions" | sort -rV | head -n 1)
src=$(gawk -F , "/^$name,$latestVersion,/ { print \$3 }" $csv)
filename=$(gawk -F , "/^$name,$latestVersion,/ { print \$4 }" $csv)
url="${src:2}"
sha256=$(nix-hash --type sha256 --base32 --flat "$src")
cat >>"$SRCS" <<EOF
$name = {
version = "$latestVersion";
src = fetchurl {
url = "\${mirror}/$url";
sha256 = "$sha256";
name = "$filename";
};
};
EOF
done

echo "}" >>"$SRCS"

popd >/dev/null
rm -fr $tmp >/dev/null

rm -f $csv >/dev/null
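For context, a minimal sketch (not part of the diff) of the srcs.nix file that fetch-kde-qt.sh generates; the attribute name, version, URL path, and hash are placeholders:

{ fetchurl, mirror }:

{
  kcoreaddons = {
    version = "5.24.0";
    src = fetchurl {
      url = "${mirror}/frameworks/5.24/kcoreaddons-5.24.0.tar.xz";
      sha256 = "0000000000000000000000000000000000000000000000000000";
      name = "kcoreaddons-5.24.0.tar.xz";
    };
  };
}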
@@ -1,26 +1,21 @@
# This expression returns a list of all fetchurl calls used by ‘expr’.
# This expression returns a list of all fetchurl calls used by all
# packages reachable from release.nix.

with import ../.. { };
with lib;

{ expr }:

let

root = expr;
root = removeAttrs (import ../../pkgs/top-level/release.nix { }) [ "tarball" "unstable" ];

uniqueUrls = map (x: x.file) (genericClosure {
startSet = map (file: { key = file.url; inherit file; }) urls;
operator = const [ ];
});

urls = map (drv: { url = head (drv.urls or [ drv.url ]); hash = drv.outputHash; type = drv.outputHashAlgo; name = drv.name; }) fetchurlDependencies;
urls = map (drv: { url = head drv.urls; hash = drv.outputHash; type = drv.outputHashAlgo; }) fetchurlDependencies;

fetchurlDependencies =
filter
(drv: drv.outputHash or "" != "" && drv.outputHashMode or "flat" == "flat"
&& drv.postFetch or "" == "" && (drv ? url || drv ? urls))
dependencies;
fetchurlDependencies = filter (drv: drv.outputHash or "" != "" && drv ? urls) dependencies;

dependencies = map (x: x.value) (genericClosure {
startSet = map keyDrv (derivationsIn' root);
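For context, a minimal sketch (not part of the diff) of the shape of one element in the list that find-tarballs.nix evaluates to, which copy-tarballs.pl then reads as JSON; every value here is a placeholder:

{
  url = "http://example.org/foo-1.0.tar.gz";
  hash = "0000000000000000000000000000000000000000000000000000";
  type = "sha256";
  name = "foo-1.0.tar.gz";
}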
maintainers/scripts/gnome-latest.sh (new executable file, 95 lines)
@@ -0,0 +1,95 @@
#!/bin/sh

GNOME_FTP="ftp.gnome.org/pub/GNOME/sources"

project=$1

if [ "$project" == "--help" ]; then
echo "Usage: $0 project [major.minor]"
exit 0
fi

baseVersion=$2

if [ -z "$project" ]; then
echo "No project specified, exiting"
exit 1
fi

# curl -l ftp://... doesn't work from my office in HSE, and I don't want to have
# any conversations with sysadmin. Somehow lftp works.
if [ "$FTP_CLIENT" = "lftp" ]; then
ls_ftp() {
lftp -c "open $1; cls"
}
else
ls_ftp() {
curl -l "$1"/
}
fi

if [ -z "$baseVersion" ]; then
echo "Looking for available versions..." >&2
available_baseversions=( `ls_ftp ftp://${GNOME_FTP}/${project} | grep '[0-9]\.[0-9]' | sort -t. -k1,1n -k 2,2n` )
echo -e "The following versions are available:\n ${available_baseversions[@]}" >&2
echo -en "Choose one of them: " >&2
read baseVersion
fi

FTPDIR="${GNOME_FTP}/${project}/${baseVersion}"

#version=`curl -l ${FTPDIR}/ 2>/dev/null | grep LATEST-IS | sed -e s/LATEST-IS-//`
# gnome's LATEST-IS is broken. Do not trust it.

files=$(ls_ftp "${FTPDIR}")
declare -A versions

for f in $files; do
case $f in
(LATEST-IS-*|*.news|*.changes|*.sha256sum|*.diff*):
;;
($project-*.*.9*.tar.*):
tmp=${f#$project-}
tmp=${tmp%.tar*}
echo "Ignored unstable version ${tmp}" >&2
;;
($project-*.tar.*):
tmp=${f#$project-}
tmp=${tmp%.tar*}
versions[${tmp}]=1
;;
(*):
echo "UNKNOWN FILE $f"
;;
esac
done
echo "Found versions ${!versions[@]}" >&2
version=`echo ${!versions[@]} | sed -e 's/ /\n/g' | sort -t. -k1,1n -k 2,2n -k 3,3n | tail -n1`
echo "Latest version is: ${version}" >&2

name=${project}-${version}
echo "Fetching .sha256 file" >&2
curl -O http://${FTPDIR}/${name}.sha256sum

extensions=( "xz" "bz2" "gz" )
echo "Choosing archive extension (known are ${extensions[@]})..." >&2
for ext in ${extensions[@]}; do
if grep "\\.tar\\.${ext}$" ${name}.sha256sum >& /dev/null; then
ext_pref=$ext
sha256=$(grep "\\.tar\\.${ext}$" ${name}.sha256sum | cut -f1 -d\ )
break
fi
done
sha256=`nix-hash --to-base32 --type sha256 $sha256`
echo "Chosen ${ext_pref}, hash is ${sha256}" >&2

cat <<EOF
name = "${project}-${version}";

src = fetchurl {
url = mirror://gnome/sources/${project}/${baseVersion}/${project}-${version}.tar.${ext_pref};
sha256 = "${sha256}";
};
EOF

rm -v ${name}.sha256sum >&2
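The fragment printed by the heredoc above is meant to be pasted into a package expression. A minimal sketch (not part of the diff) of such a surrounding expression; the package name, version, and hash are invented placeholders:

{ stdenv, fetchurl }:

stdenv.mkDerivation {
  # The two attributes below are what gnome-latest.sh prints.
  name = "gnome-calculator-3.20.2";

  src = fetchurl {
    url = mirror://gnome/sources/gnome-calculator/3.20/gnome-calculator-3.20.2.tar.xz;
    sha256 = "0000000000000000000000000000000000000000000000000000";
  };
}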
@@ -1,194 +0,0 @@
#!/usr/bin/env bash

set -o pipefail

GNOME_FTP="ftp.gnome.org/pub/GNOME/sources"

# projects that don't follow the GNOME major versioning, or that we don't want to
# programmatically update
NO_GNOME_MAJOR="gtkhtml gdm"

usage() {
echo "Usage: $0 gnome_dir <show project>|<update project>|<update-all> [major.minor]" >&2
echo "gnome_dir is for example pkgs/desktops/gnome-3/3.18" >&2
exit 0
}

if [ "$#" -lt 2 ]; then
usage
fi

GNOME_TOP="$1"
shift

action="$1"

# curl -l ftp://... doesn't work from my office in HSE, and I don't want to have
# any conversations with sysadmin. Somehow lftp works.
if [ "$FTP_CLIENT" = "lftp" ]; then
ls_ftp() {
lftp -c "open $1; cls"
}
else
ls_ftp() {
curl -s -l "$1"/
}
fi

find_project() {
exec find "$GNOME_TOP" -mindepth 2 -maxdepth 2 -type d $@
}

show_project() {
local project="$1"
local majorVersion="$2"
local version=""

if [ -z "$majorVersion" ]; then
echo "Looking for available versions..." >&2
local available_baseversions=( `ls_ftp ftp://${GNOME_FTP}/${project} | grep '[0-9]\.[0-9]' | sort -t. -k1,1n -k 2,2n` )
if [ "$?" -ne "0" ]; then
echo "Project $project not found" >&2
return 1
fi

echo -e "The following versions are available:\n ${available_baseversions[@]}" >&2
echo -en "Choose one of them: " >&2
read majorVersion
fi

if echo "$majorVersion" | grep -q "[0-9]\+\.[0-9]\+\.[0-9]\+"; then
# not a major version
version="$majorVersion"
majorVersion=$(echo "$majorVersion" | cut -d '.' -f 1,2)
fi

local FTPDIR="${GNOME_FTP}/${project}/${majorVersion}"

#version=`curl -l ${FTPDIR}/ 2>/dev/null | grep LATEST-IS | sed -e s/LATEST-IS-//`
# gnome's LATEST-IS is broken. Do not trust it.

if [ -z "$version" ]; then
local files=$(ls_ftp "${FTPDIR}")
declare -A versions

for f in $files; do
case $f in
(LATEST-IS-*|*.news|*.changes|*.sha256sum|*.diff*):
;;
($project-*.*.9*.tar.*):
tmp=${f#$project-}
tmp=${tmp%.tar*}
echo "Ignored unstable version ${tmp}" >&2
;;
($project-*.tar.*):
tmp=${f#$project-}
tmp=${tmp%.tar*}
versions[${tmp}]=1
;;
(*):
echo "UNKNOWN FILE $f" >&2
;;
esac
done
echo "Found versions ${!versions[@]}" >&2
version=`echo ${!versions[@]} | sed -e 's/ /\n/g' | sort -t. -k1,1n -k 2,2n -k 3,3n | tail -n1`
if [ -z "$version" ]; then
echo "No version available for major $majorVersion" >&2
return 1
fi

echo "Latest version is: ${version}" >&2
fi

local name=${project}-${version}
echo "Fetching .sha256 file" >&2
local sha256out=$(curl -s -f http://${FTPDIR}/${name}.sha256sum)

if [ "$?" -ne "0" ]; then
echo "Version not found" >&2
return 1
fi

extensions=( "xz" "bz2" "gz" )
echo "Choosing archive extension (known are ${extensions[@]})..." >&2
for ext in ${extensions[@]}; do
if echo -e "$sha256out" | grep -q "\\.tar\\.${ext}$"; then
ext_pref=$ext
sha256=$(echo -e "$sha256out" | grep "\\.tar\\.${ext}$" | cut -f1 -d\ )
break
fi
done
echo "Chosen ${ext_pref}, hash is ${sha256}" >&2

echo "# Autogenerated by maintainers/scripts/gnome.sh update

fetchurl: {
name = \"${project}-${version}\";

src = fetchurl {
url = mirror://gnome/sources/${project}/${majorVersion}/${project}-${version}.tar.${ext_pref};
sha256 = \"${sha256}\";
};
}"

return 0
}

update_project() {
local project="$1"
local majorVersion="$2"

# find project in nixpkgs tree
projectPath=$(find_project -name "$project" -print)
if [ -z "$projectPath" ]; then
echo "Project $project not found under $GNOME_TOP"
exit 1
fi

src=$(show_project "$project" "$majorVersion")

if [ "$?" -eq "0" ]; then
echo "Updating $projectPath/src.nix" >&2
echo -e "$src" > "$projectPath/src.nix"
fi

return 0
}

if [ "$action" == "update-all" ]; then
majorVersion="$2"
if [ -z "$majorVersion" ]; then
echo "No major version specified" >&2
usage
fi

# find projects
projects=$(find_project -exec basename '{}' \;)
for project in $projects; do
if echo "$NO_GNOME_MAJOR"|grep -q $project; then
echo "Skipping $project"
else
echo "= Updating $project to $majorVersion" >&2
update_project $project $majorVersion
echo >&2
fi
done
else
project="$2"
majorVersion="$3"

if [ -z "$project" ]; then
echo "No project specified, exiting" >&2
usage
fi

if [ "$action" == "show" ]; then
show_project $project $majorVersion
elif [ "$action" == "update" ]; then
update_project $project $majorVersion
else
echo "Unknown action $action" >&2
usage
fi
fi
maintainers/scripts/gnu/gnupdate (executable file, 1122 lines): file diff suppressed because it is too large.
@@ -6,7 +6,6 @@ hydra_eval_jobs \
--argstr system i686-linux \
--argstr system x86_64-darwin \
--argstr system i686-cygwin \
--argstr system x86_64-cygwin \
--argstr system i686-freebsd \
--arg officialRelease false \
--arg nixpkgs "{ outPath = builtins.storePath ./. ; rev = 1234; }" \
maintainers/scripts/map-files.pl (new file, 22 lines)
@@ -0,0 +1,22 @@
#! /usr/bin/perl -w

use strict;

my %map;
open LIST1, "<$ARGV[0]" or die;
while (<LIST1>) {
/^(\S+)\s+(.*)$/;
$map{$1} = $2;
}

open LIST1, "<$ARGV[1]" or die;
while (<LIST1>) {
/^(\S+)\s+(.*)$/;
if (!defined $map{$1}) {
print STDERR "missing file: $2\n";
next;
}
print "$2\n";
print "$map{$1}\n";
}
@@ -1,5 +1,5 @@
#! /bin/sh

echo "let pkgs = import <nixpkgs$2> {}; x = pkgs.callPackage $1 { $3 }; in ${4:-x}" |
echo "let pkgs = import /etc/nixos/nixpkgs$2 {}; x = pkgs.callPackage $1 { $3 }; in ${4:-x}" |
nix-instantiate --show-trace - |
xargs nix-store -r -K
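A minimal sketch (not part of the diff) of the Nix expression the one-liner above pipes into nix-instantiate, with the positional parameters $1, $2, $3, and $4 substituted by example values:

let
  pkgs = import <nixpkgs> { };
  x = pkgs.callPackage ./default.nix { };
in x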
Some files were not shown because too many files have changed in this diff.