Compare commits

..

478 Commits

Author SHA1 Message Date
Domen Kožar
d231868990 changelog: add all new NixOS modules 2016-03-31 23:00:24 +01:00
ne0phyte
5c5e904763 kicad: 2013 stable -> 4.0.2
(cherry picked from commit cce37d2164)
2016-03-31 23:11:47 +02:00
Joachim Fasting
a8e92de019 electrum: 2.6.2 -> 2.6.3
(cherry picked from commit 0e05d552f8)
Upstream tends to outright close tickets filed against anything but the
latest version.
2016-03-31 23:11:00 +02:00
Lancelot SIX
70bab96717 gnupg20: 2.0.29 -> 2.0.30
See https://lists.gnupg.org/pipermail/gnupg-announce/2016q1/000385.html

(cherry picked from commit d6f9e35683)
2016-03-31 20:50:37 +02:00
Nikolay Amiantov
9ac86f947e steam: use old C++ ABI for primus
(cherry picked from commit 0276a8b2d2)
2016-03-31 19:53:21 +03:00
Nikolay Amiantov
bd19f47b68 primus: propagate stdenv to primusLibs
(cherry picked from commit 9b7edbeb2f)
2016-03-31 19:53:15 +03:00
Nikolay Amiantov
b0d9cb36de stdenvAdapters.useOldCXXAbi: add new adapter
(cherry picked from commit 9134f9358a)
2016-03-31 19:53:08 +03:00
Joachim Fasting
4a021a017d kdevplatform: disable parallel build
Hotfix for 1edb9b9558
Ref: https://github.com/NixOS/nixpkgs/pull/13843

(cherry picked from commit cd7242d09d)
Hydra has a failure on 16.03, too:
http://hydra.nixos.org/build/33898803/nixlog/1/raw
2016-03-31 18:03:53 +02:00
Vladimír Čunát
6465c790e2 partimage: fix build with openssl-1.0.2
...by using patch from Arch (taken from Debian).

(cherry picked from commit 1186bffb7f)
2016-03-31 17:57:42 +02:00
Franz Pletz
38fca2124a php: 7.0.2 -> 7.0.5 (security)
https://secure.php.net/ChangeLog-7.php#7.0.5

(cherry picked from commit fc1e886f1b)
2016-03-31 16:09:03 +02:00
Michael Raskin
5fa1475d12 davfs2: 1.4.7 -> 1.5.2; fixes the build
(cherry picked from commit cdb2bc77c4)
Signed-off-by: Domen Kožar <domen@dev.si>
2016-03-31 13:48:26 +01:00
Domen Kožar
0263e6efec qemu: 2.5.0 -> 2.5.1
Hopefully this also fixes installer tests on i686

(cherry picked from commit 8a34a3b37a)
Signed-off-by: Domen Kožar <domen@dev.si>
2016-03-31 13:48:08 +01:00
Eelco Dolstra
65075167ce NixOS manual: Add some release notes
(cherry picked from commit e60be0923b)
Signed-off-by: Domen Kožar <domen@dev.si>
2016-03-31 13:47:12 +01:00
Eelco Dolstra
a2b526d41a Fix the boot-ec2-config test
(cherry picked from commit 1783e33b06)
2016-03-31 13:32:56 +02:00
Pascal Wittmann
daf0729f3a nixos/manpages: enable linebreaking after slashes
Allow line breaks after slashes in long URLs. The option used
is documented at

   http://docbook.sourceforge.net/release/xsl/current/doc/manpages/man.break.after.slash.html

This commit fixes #4538.

(cherry picked from commit 8ddfab0cf2)
2016-03-31 11:29:05 +02:00
Tuomas Tynkkynen
2d840dad4c ios-cross-compile: Don't build on hydra
http://hydra.nixos.org/build/33505267/nixlog/1/raw

(cherry picked from commit c12f63821f)
2016-03-31 07:22:54 +03:00
Vladimír Čunát
2253d675d7 hhvm: disable parallel building
/cc #14151.
http://hydra.nixos.org/build/33846692/nixlog/1/tail

(cherry picked from commit 254e2cc982)
2016-03-31 00:56:48 +02:00
Domen Kožar
26ac90e6ad Merge pull request #14264 from ttuegel/emacs-release-16.03
emacsPackagesNg: remove compatibility cl-lib
2016-03-30 23:33:18 +01:00
Domen Kožar
864b2d6aae perlPackages.UnicodeString: fix build (also imapsync)
(cherry picked from commit 1845159705)
Signed-off-by: Domen Kožar <domen@dev.si>
2016-03-30 21:07:53 +01:00
Luca Bruno
2173c47506 namazu: mark as broken
(cherry picked from commit 04d4d0000f)
2016-03-30 21:54:41 +02:00
Eelco Dolstra
792bbd380e Fix the EC2 test
We now generate a qcow2 image to prevent hitting Hydra's output size
limit. Also updated /root/user-data -> /etc/ec2-metadata/user-data.

http://hydra.nixos.org/build/33843133
(cherry picked from commit 0d3738cdcc)
2016-03-30 21:51:15 +02:00
Rickard Nilsson
5feeab1d57 nixos/filesystems: Fix fs options type error
(cherry picked from commit 6ff5821be6)
2016-03-30 21:51:10 +02:00
Luca Bruno
556e1b892b glib-tested: suppress gdbus test needing machine-id
(cherry picked from commit 184b7ba3c6)
2016-03-30 21:29:19 +02:00
Luca Bruno
33fe6ee6a6 oprofile: depend on libiberty_static
(cherry picked from commit 18918507f2)
2016-03-30 21:00:53 +02:00
Luca Bruno
77a465e98f libiberty: add static variant
(cherry picked from commit 088231fe9f)
2016-03-30 21:00:46 +02:00
Domen Kožar
b0f8e15376 remove erlangR14: outdated and doesn't build
(cherry picked from commit a1cfdb9c88)
Signed-off-by: Domen Kožar <domen@dev.si>
2016-03-30 18:55:20 +01:00
Lluís Batlle i Rossell
4254bb9828 Fix evaluation. (licenses, not licences)
Thanks Domen.

(cherry picked from commit 0c98b52816)
Signed-off-by: Domen Kožar <domen@dev.si>
2016-03-30 18:52:00 +01:00
Lluís Batlle i Rossell
e59d4141c6 Updating wings and dependencies to match erlang
I updated the erlang esdl lib, added the erlang cl lib,
added opencl-headers and ocl-icd to make wings build and run.

I have not tested its opencl part; I only added dependencies so
it builds.

(cherry picked from commit f6a44bea9e)
Signed-off-by: Domen Kožar <domen@dev.si>
2016-03-30 18:51:47 +01:00
Michael Raskin
162556c9c6 perl-Alien-Wx: pass ModuleBuild dependency
(cherry picked from commit af3ec2046a)
Signed-off-by: Domen Kožar <domen@dev.si>
2016-03-30 18:44:15 +01:00
Domen Kožar
d2e4593240 manual: use a better relaxng validation tool #4966
(cherry picked from commit ccdda96c2f)
Signed-off-by: Domen Kožar <domen@dev.si>
2016-03-30 18:37:29 +01:00
Graham Christensen
e8c74f4c61 jenkins: copy .war to $out, fixes #14137
(cherry picked from commit 0b8dd57694)
Signed-off-by: Domen Kožar <domen@dev.si>
2016-03-30 18:37:15 +01:00
Ambroz Bizjak
32889165e0 pythonPackages/power: 1.2 -> 1.4
This fixes the build both for Python 2.7 and 3.

(cherry picked from commit 492c826a5d)
Signed-off-by: Domen Kožar <domen@dev.si>
2016-03-30 18:36:51 +01:00
Aristid Breitkreuz
7cf7ed166c Finance::Quote: 1.37 -> 1.38 & add missing dependency on CGI
(cherry picked from commit 63032dae9d)
Signed-off-by: Domen Kožar <domen@dev.si>
2016-03-30 18:13:16 +01:00
Eelco Dolstra
23489b34c0 Bring back $SSL_CERT_FILE
Commit 9f358f809d removed
$SSL_CERT_FILE, which is fine for binaries linking against the current
OpenSSL package, but not for old binaries (e.g. those installed via
nix-env). So let's keep $SSL_CERT_FILE for a while longer.
2016-03-30 16:37:18 +02:00
Eelco Dolstra
badecc4c42 nixos-rebuild: Fix Nix fallback
Somebody forgot that Bash is not a real programming language...

(cherry picked from commit c94f8a4abd)
2016-03-30 16:37:03 +02:00
aszlig
7133dcdd28 chromium: Update all channels to latest versions
Overview of the updated versions:

stable: 49.0.2623.87 -> 49.0.2623.110
beta:   50.0.2661.26 -> 50.0.2661.49
dev:    50.0.2661.18 -> 51.0.2693.2

Most notably, this includes a series of urgent security fixes:

 * CVE-2016-1646: Out-of-bounds read in V8. Credit to Wen Xu from
                  Tencent KeenLab.
 * CVE-2016-1647: Use-after-free in Navigation. Credit to anonymous.
 * CVE-2016-1648: Use-after-free in Extensions. Credit to anonymous.
 * CVE-2016-1649: Buffer overflow in libANGLE. Credit to lokihardt
                  working with HP's Zero Day Initiative / Pwn2Own.
 * CVE-2016-1650: Denial of service in PageCaptureSaveAsMHTMLFunction

The official release announcement with details about these fixes can be
found here:

http://googlechromereleases.blogspot.de/2016/03/stable-channel-update_24.html

Beta and stable could also be affected, although I didn't do a detailed
check whether that's the case.

As this introduces Chromium 51 as the dev version, I had to make the
following changes to make it build:

 * libexif got removed, so let's do that on our end as well.
   See https://codereview.chromium.org/1803883002 for details.
 * Chromium doesn't seem to compile with our version of libpng, so let's
   resort to the bundled libpng for now.
 * site_engagement_ui.cc uses isnan outside of std namespace, so
   we're fixing that in postPatch using sed.

I have successfully built all versions on i686-linux and x86_64-linux
and tested it using the VM tests.

Test reports can be found at the following evaluation of my Hydra:

https://headcounter.org/hydra/eval/314584

Thanks to @grahamc for reporting this.

Signed-off-by: aszlig <aszlig@redmoonstudios.org>
Reported-by: Graham Christensen <graham@grahamc.com>
Fixes: #14299
(cherry picked from commit ef753d210e)
2016-03-30 15:27:44 +02:00
Domen Kožar
6824d79c69 Merge pull request #14316 from ttuegel/kde5-release-16.03
kde5.applications: 15.12.1 -> 15.12.3
2016-03-30 13:02:03 +01:00
Vladimír Čunát
36107e6f03 qt4-clang: fix fallout from f9b5ed66d1
There's still the question of Hydra binaries etc. but this should at
least fix evaluation and running problems.

(cherry picked from commit 8ffe681713)
2016-03-30 13:28:46 +02:00
Franz Pletz
5b83e496c8 dhcpcd: 6.9.4 -> 6.10.1 (security)
Fixes CVE-2016-1503 & CVE-2016-1504.

Changelog:
  - http://roy.marples.name/archives/dhcpcd-discuss/2016/1143.html
  - http://roy.marples.name/archives/dhcpcd-discuss/2016/1146.html

(cherry picked from commit 5aa986fba2)

See #14313.
2016-03-30 13:08:00 +02:00
Eelco Dolstra
24d68896e4 Inline qt4-clang
This prevents a potential name/version conflict in nix-env, and a
large Hydra build for a proprietary package.

(cherry picked from commit f9b5ed66d1)
2016-03-30 11:26:17 +02:00
Eelco Dolstra
115cb2f780 openvpn: 2.3.8 -> 2.3.10
In particular, this fixes the systemd-ask-password regression
re-introduced by cb1c818491.

(cherry picked from commit 38afa836b3)
2016-03-30 11:26:17 +02:00
Eelco Dolstra
e61b8b2294 Catalyst::Action::Rest: 1.19 -> 1.20
(cherry picked from commit 6e08bd27fc)
2016-03-30 11:26:17 +02:00
Eelco Dolstra
79c3c16dcb Restore core dumps
Systemd 229 sets kernel.core_pattern to "|/bin/false" by default,
unless systemd-coredump is enabled. Revert back to the default of
writing "core" in the current directory.

(cherry picked from commit 54ca7e9f75)
2016-03-30 11:26:17 +02:00
Vladimír Čunát
8f9f2347d3 firefox-esr: fix build after 574a6d34d2
We're now using only newer versions that have ./configure in the root.
${pname} isn't the correct directory name for esr versions.

(cherry picked from commit ec4685cf70)
2016-03-30 11:26:17 +02:00
Eelco Dolstra
aeab34ccd2 firefox-esr: 38.6.1 -> 45.0.1
(cherry picked from commit 574a6d34d2)
2016-03-30 11:26:17 +02:00
Eelco Dolstra
8bd86b91d9 firefox: 45.0 -> 45.0.1
(cherry picked from commit 79d6dc91fe)
2016-03-30 11:26:17 +02:00
Peter Simons
5b86bfc58d ghc: version 6.12.3 is broken after updating to gcc 5.x
http://hydra.nixos.org/build/33627548
(cherry picked from commit 070b123d4b)
2016-03-30 10:59:12 +02:00
Vladimír Čunát
2aba37aaf0 intltool: fix problems with perl-5.22
http://hydra.nixos.org/build/33608086/nixlog/1/raw
(cherry picked from commit 117183e27e)
2016-03-30 10:44:48 +02:00
Vladimír Čunát
ccc2c7c9e5 mesa: maintenance update 11.1.1 -> 11.1.2
(cherry picked from commit f4cb39c3d3)
2016-03-30 10:44:43 +02:00
Peter Simons
d665da8ea6 Disable broken Haskell builds some more. 2016-03-29 21:59:06 +02:00
Nikolay Amiantov
f3319286ff xserver service: add glamoregl for intel drivers
Closes #14286

Credits to vcunat for the initial patch.

(cherry picked from commit 63f1eb6b00)
2016-03-29 19:04:11 +03:00
Peter Simons
c1818c2963 Disable broken Haskell builds. 2016-03-29 17:12:45 +02:00
Nikolay Amiantov
bf65250cdb Revert "Remove PATH assumption from fhs-userenv."
This reverts commit 2f26b82411.

This breaks terminfo in Bash for some reason (i.e. TAB and other
special keys).

(cherry picked from commit a5322efd95)
2016-03-29 17:58:36 +03:00
Thomas Tuegel
83d6492b02 kde5.applications: 15.12.1 -> 15.12.3
(cherry picked from commit 7079075d4caab68a22e8a1aac82df774f1d99d58)
2016-03-29 09:49:50 -05:00
Vladimír Čunát
12528e547f manual rl-16.03: document broadcom issue #12595
(cherry picked from commit d9b98b6b50)
2016-03-29 16:33:49 +02:00
Peter Simons
cc8278e186 Document the fact that the firewall allows pings by default in rl-1603.xml.
(cherry picked from commit 9a2ee42f52)
2016-03-29 16:15:17 +02:00
Domen Kožar
2e7727f647 nixos/lib/testing.nix: make 'config' a free variable
cc @edolstra

(cherry picked from commit c56c3b6596)
Signed-off-by: Domen Kožar <domen@dev.si>
2016-03-29 14:35:56 +01:00
Nikolay Amiantov
90748d0983 avidemux: don't depend on unfree FAAC by default
(cherry picked from commit c2c1ef89cd3c2e232d744b034e685fb10cd23327)
2016-03-29 16:15:55 +03:00
Nikolay Amiantov
0432279805 avidemux: 2.5.6 -> 2.6.12
(cherry picked from commit 8b0076b887)

Fixes i686-linux builds.
2016-03-29 16:01:39 +03:00
Brian McKenna
6af1cd202d steam: enable hardware decoding (for In-Home Streaming)
Previous to this patch I was getting software decoding when I used In-Home
Streaming. I had a look around and according to:

https://bbs.archlinux.org/viewtopic.php?id=187922

> It seems that the libva version Steam comes with, is not compatible anymore to
> the newer libva 1.4.0.

Substituting in our version of libva gives me hardware decoding!

(cherry picked from commit d47e2fde69)
2016-03-29 15:47:56 +03:00
Vladimír Čunát
445e5c4ca5 rustc: disable parallel building
http://hydra.nixos.org/build/33119905/nixlog/1/raw

(cherry picked from commit d7e87db0c7)
2016-03-29 13:02:22 +02:00
Vladimír Čunát
60622584db xmlsec: fix linkage, probably after #909
This fixes builds of (some) reverse dependencies, e.g. aqbanking.

(cherry picked from commit e69306c463)
2016-03-29 12:23:10 +02:00
Vladimír Čunát
6135eafe30 pythonPackages.poppler-qt4: fix build by a hack
/cc maintainer @sepi.

(cherry picked from commit 5147b9d30a)
2016-03-29 11:57:01 +02:00
Vladimír Čunát
e19d01d6c6 dvdisaster: disable parallel building
http://hydra.nixos.org/build/33609373/nixlog/1/raw
/cc @nkcx. I notified upstream.

(cherry picked from commit 62c29908da)
2016-03-29 10:54:46 +02:00
taku0
c9f09d2b77 thunderbird-bin: 38.6.0 -> 38.7.1
(cherry picked from commit dc73280d19)

Security & bug fixes

See https://www.mozilla.org/en-US/thunderbird/38.7.0/releasenotes/
and https://www.mozilla.org/en-US/thunderbird/38.7.1/releasenotes/
2016-03-29 02:34:58 +02:00
Robin Gloster
0f7088161a vacuum: fix build 2016-03-28 23:33:47 +00:00
Nikolay Amiantov
fcd16856b4 haskellPackages.mueval: fix build
(cherry picked from commit d94ffd5655394131d292780cb5e82dc13fcd6d2b)
2016-03-29 02:24:23 +03:00
Domen Kožar
7f2dc5d3f6 garden 2016-03-29 00:15:27 +01:00
Domen Kožar
2267e14d68 Merge pull request #14280 from therealpxc/backport-new-robotics-pkgs
Backport new robotics pkgs
2016-03-29 00:12:19 +01:00
Patrick Callahan
ac5f358c00 genromfs: init at 0.5.2 2016-03-28 15:42:55 -07:00
Patrick Callahan
4ae5327664 gazebo: init at 6.5.1 and 7.0.0 2016-03-28 15:42:05 -07:00
Patrick Callahan
3fd3be4408 ignition.transport: init at 0.9.0 and 1.0.1 2016-03-28 15:42:05 -07:00
Patrick Callahan
f50a79045c sdformat: init at 3.7.0 and 4.0.0 2016-03-28 15:42:05 -07:00
Patrick Callahan
1f379d95e4 ignition robotics libs: init; .math: init at 2.3.0 2016-03-28 15:42:04 -07:00
Patrick Callahan
a6f5f1efad tinyxml-2: init at 3.0.0 2016-03-28 15:42:04 -07:00
Patrick Callahan
20aa39d18d ogre: 1.9.0 -> 1.9-hg 2016-03-28 15:42:04 -07:00
Patrick Callahan
1d72ddad9d qgroundcontrol: init at 2.9.4 2016-03-28 15:42:04 -07:00
=
4e2f29507f meterbridge: fix gcc-5 build (thanks to: http://ports.ubuntu.com/pool/universe/m/meterbridge/)
(cherry picked from commit c7a26ccf9d)
2016-03-28 22:32:19 +02:00
taku0
979bef5356 oraclejdk: 8u73, 8u74 -> 8u77
(cherry picked from commit b8cc111764)

Security fix for CVE-2016-0636.

http://www.oracle.com/technetwork/java/javase/8u77-relnotes-2944725.html
2016-03-28 21:25:56 +02:00
Joachim Fasting
d022dc9300 lsh: fix gcc5 build
The build fails with c11 (also tested c99), but works with gnu90.
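A minimal C sketch (hypothetical, not lsh source) of the difference this works around: under -std=c11 (or c99), a plain `inline` definition provides no external symbol, so a call that isn't actually inlined fails to link, whereas -std=gnu90 uses the old gnu89 inline semantics and emits an ordinary external definition.

    /* demo.c -- hypothetical example, not part of lsh */
    inline int answer(void) { return 42; }   /* neither 'static' nor 'extern' */

    int main(void) { return answer(); }

    /* gcc -std=c11   -O0 demo.c  -> undefined reference to `answer'          */
    /* gcc -std=gnu90 -O0 demo.c  -> links fine (gnu89 inline emits a symbol) */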

(cherry picked from commit 8bd72dfaa4)
2016-03-28 19:06:32 +02:00
Brad Ediger
3a13d4707b spotify: 1.0.25.127 -> 1.0.26.125
(cherry picked from commit 066042e3fa)
2016-03-28 18:27:45 +02:00
Joachim Fasting
713af5258f v8: fix build(s)
Ignore errors due to strict-overflow warnings; strip clang-only flag on
non-clang builds. Concerning the latter "fix", it's not entirely clear to me why
the -Wno-format-pedantic flag ends up being passed to gcc; the .gyp file appears
to already condition the inclusion of this flag on whether cc=clang.
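For illustration, a minimal C snippet (hypothetical, not from v8) of the kind of code that trips -Wstrict-overflow once the optimizer assumes signed arithmetic never overflows; with -Werror that warning alone aborts the build, which is why the errors are ignored here.

    /* strict-overflow-demo.c -- hypothetical example */
    int wraps_around(int i) {
        /* gcc -c -O2 -Wstrict-overflow -Werror strict-overflow-demo.c
           fails with (roughly) "assuming signed overflow does not occur";
           the comparison is folded to 0 under the no-overflow assumption. */
        return i + 1 < i;
    }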

(cherry picked from commit 72b5bfda97)
2016-03-28 18:04:49 +02:00
Domen Kožar
70a3a43ed8 Merge pull request #14268 from grahamc/failingtests-16.03
ikiwiki: Fix failing dependency builds (16.03)
2016-03-28 15:38:27 +01:00
Ambroz Bizjak
caffe29c3e wxPython: Fix runtime error due to library dependencies not in RUNPATH.
I think what's happening is that the linker automatically adds DT_NEEDED dependencies for some libraries because it finds these libraries are being used directly, but
because they're not linked explicitly with -l flags, the gcc wrapper does not add them to RUNPATH.
2016-03-28 16:28:53 +02:00
Graham Christensen
ddede54d49 perlPackage.RTClientREST: Depend on CGI
(cherry picked from commit 125ee11a35)
2016-03-28 09:20:25 -05:00
Graham Christensen
1d05903577 I18NLangTags: Removed, as this version is 12 years old and is now bundled with core.
(cherry picked from commit c3d6b5e8f2)
2016-03-28 09:20:25 -05:00
Graham Christensen
e3def23b8f perlPackages.DateTimeFormatDateParse: Depend on ModuleBuild
(cherry picked from commit 36b88f8df7)
2016-03-28 09:20:25 -05:00
Graham Christensen
65f4afba5e perlPackages.DataSerializer: Depend on ModuleBuild
(cherry picked from commit 200ddaa54f)
2016-03-28 09:20:25 -05:00
Graham Christensen
191df96839 perlPackages.ScalarString: Depend on ModuleBuild
(cherry picked from commit ce5914c898)
2016-03-28 09:20:24 -05:00
Graham Christensen
ad2acc8fcd perlPackages.ParamsClassify: Depend on ModuleBuild
(cherry picked from commit aa73eadc1c)
2016-03-28 09:20:24 -05:00
Graham Christensen
ec561c6075 perlPackages.NetOpenIDConsumer: Depend on CGI
(cherry picked from commit 849e743040)
2016-03-28 09:20:24 -05:00
Graham Christensen
0f59068762 perlPackages.HTTPLite: Depend on ModuleBuild
(cherry picked from commit df9a6362ea)
2016-03-28 09:20:24 -05:00
Graham Christensen
e32aabe4f1 perlPackages.DataFloat: Depend on ModuleBuild
(cherry picked from commit 9fc9ede52d)
2016-03-28 09:20:24 -05:00
Graham Christensen
d8fe0c9790 perlPackages.DataEntropy: Depend on ModuleBuild
(cherry picked from commit 4532a2a75d)
2016-03-28 09:20:24 -05:00
Graham Christensen
3816f98e0a perlPackages.CryptEksblowfish: Depend on ModuleBuild
(cherry picked from commit 5dd946ded9)
2016-03-28 09:20:23 -05:00
Graham Christensen
15c7003f07 perlPackages.ClassMix: Depend on CGI
(cherry picked from commit 189d29e5f6)
2016-03-28 09:20:23 -05:00
Graham Christensen
1ff2835e5a perlPackages.AuthenPassphrase: Depend on ModuleBuild
(cherry picked from commit fb7ebfb8a6)
2016-03-28 09:14:14 -05:00
Graham Christensen
153fdba2c4 perlPackages.AuthenDecHpwd: Depend on ModuleBuild
(cherry picked from commit 2d182a2992)
2016-03-28 09:13:56 -05:00
Graham Christensen
f45acd3831 perlPackages.DataInteger: depend on ModuleBuild
(cherry picked from commit f11fd4a476)
2016-03-28 09:13:48 -05:00
Vladimír Čunát
e7ffa6c42e clang-3.5: mark as broken
It seems unlikely someone will want to fix it anymore.

(cherry picked from commit be447475d3)
2016-03-28 14:43:02 +02:00
Vladimír Čunát
af551f2dea qt55.vlc: fix build
I tested it on a video to make sure the bug doesn't appear.

(cherry picked from commit ed47bb1ca8)
2016-03-28 14:37:52 +02:00
Andrew Kelley
538d9b5d6d llvmPackages: add 3.8.0 (close #13801)
vcunat's review:
 - let's not switch the default versions of llvm* for now
 - the only changes I see is adding python to clang's buildInputs
   and using the big so-file as discussed in #12759
   (BUILD_SHARED_LIBS -> LLVM_LINK_LLVM_DYLIB)
 - in future it will be nice to split libLLVM into a separate output

(cherry picked from commit f5fe051c71)
2016-03-28 13:52:24 +02:00
Thomas Tuegel
c5583a4540 emacsPackagesNg: remove compatibility cl-lib
ELPA has the compatibility library cl-lib-0.5 which interferes with the
builtin cl-lib-1.0.

(cherry picked from commit 6c05554b85)
2016-03-28 06:31:07 -05:00
aszlig
f12e91f2c5 chromium: Link using gold linker flags
I originally wanted to do this a long time (a31301d) but IIRC back then
it didn't compile. Nowadays with the splitup of the gold linking flags
and the binutils integration, it's merely just a switch to flip, so
let's do that.

Only tested it by building against the current Chromium stable version
on 64bit, because right now builds on Hydra seem to time out (because of
this?) anyway so we have nothing to lose here.

The linking time was hereby reduced from >30 minutes (I didn't measure
it exactly, but checked the build progress half an hour later and it
was *still* linking) to about a few seconds, which, even though the
measurement is quite bogus, is a tremendous improvement nonetheless.

Signed-off-by: aszlig <aszlig@redmoonstudios.org>
(cherry picked from commit f9fff51c2a)
2016-03-28 11:42:07 +02:00
Vladimír Čunát
04e2196d82 glu: fix the pkg-config file (fixes #14260)
(cherry picked from commit a7d34e0c13)
2016-03-28 11:34:05 +02:00
Domen Kožar
7877e33af8 fix munin (and the test), refs #12801 #13999 2016-03-28 10:26:06 +01:00
Michael Raskin
ee6a568a14 Fix Midori build
(cherry picked from commit 891fa19e29)
Signed-off-by: Domen Kožar <domen@dev.si>
2016-03-28 09:37:30 +01:00
Michael Raskin
6dd99f177c lilypond: set some HOME during the build for Metafont
(cherry picked from commit 1a97cfb91f)
Signed-off-by: Domen Kožar <domen@dev.si>
2016-03-28 09:37:26 +01:00
Vladimír Čunát
2647742e02 Revert "texlive.combine: patch paths into texmf.cnf"
This reverts commit 7e74fad881.
Let's revert this in the release, at least for now. It seems the change
isn't perfect and causes some problems.
2016-03-28 08:45:08 +02:00
Nikolay Amiantov
64fd93763a skype: use clang-built qt4 to fix segfault
(cherry picked from commit 28af80fcc0)
Signed-off-by: Domen Kožar <domen@dev.si>
2016-03-27 20:26:25 +01:00
Joachim Fasting
4476d74db6 linux_grsec_3_14: mark as broken
First, the patch is outdated; I failed to find it anywhere in the mirror repos.
Second, the build fails, and while it may be "fixed" by ad-hoc patching (it
appears to simply need some missing includes), this would mean shipping a
potentially insecure software package. Given that the only reason to use
grsecurity is security, this is both misleading and exposes users to undue risk.
Finally, the build has been broken for quite a long time with no complaints,
leading me to believe that the number of actual users is quite low.

(cherry picked from commit dd16dcbba4)
Signed-off-by: Domen Kožar <domen@dev.si>
2016-03-27 20:25:33 +01:00
Nikolay Amiantov
9b79dd6bf4 pgadmin: enable parallel building
(cherry picked from commit 7b82f5a3fb)
Signed-off-by: Domen Kožar <domen@dev.si>
2016-03-27 20:25:16 +01:00
Joachim Fasting
30745200d3 lttng-modules: mark as broken on kernel version <3.18
On linux 3.14, we get errors like
  error: 'struct snd_soc_codec' has no member named 'name'
     __string( name,  codec->CODEC_NAME_FIELD )
indicating that the module is incompatible with the linux API
in this kernel version.

See https://hydra.nixos.org/build/33102405/nixlog/1/raw

(cherry picked from commit a452b43ee5)
Signed-off-by: Domen Kožar <domen@dev.si>
2016-03-27 20:25:06 +01:00
Domen Kožar
7bd9321c6d perlPackages.TestMockModule: fix build 2016-03-27 20:22:52 +01:00
Domen Kožar
bb6d3c16d3 nixUnstable: bump 2016-03-27 20:22:52 +01:00
Nikolay Amiantov
7e74fad881 texlive.combine: patch paths into texmf.cnf
...instead of environment variables. Close #12768.

(cherry picked from commit acf664814e)
2016-03-27 21:20:32 +02:00
Nikolay Amiantov
471e755cc2 Revert "codeblocks: fix build"
This reverts commit fd9416fb4b.

This shouldn't be needed now that we properly link wxGTK.

(cherry picked from commit 4e9ddd3770)
2016-03-27 21:01:27 +03:00
Nikolay Amiantov
3e30c35937 wxgtk: explicitly link to libX11 and libcairo
(cherry picked from commit 5471eed63c)
2016-03-27 21:01:27 +03:00
Nikolay Amiantov
5ce9f24ded skype: small cleanup
(cherry picked from commit 3819384395)
2016-03-27 21:01:27 +03:00
Pascal Wittmann
9cb9a6b509 ispc: fix one error by adding glibc32
llvm linking errors remain
2016-03-27 18:03:40 +02:00
Vladimír Čunát
da25f05fc2 texlive: document in nixpkgs manual
Fixes #13240. It's not really better than source-code comments it replaced,
but it's in a better accessible place.

(cherry picked from commit e3da83297f)
2016-03-27 14:33:40 +02:00
Joachim Fasting
843d11b292 codeblocks: fix build
https://hydra.nixos.org/build/33633573/nixlog/1/raw
(cherry picked from commit fd9416fb4b)
2016-03-27 10:30:31 +02:00
Joachim Fasting
424d1aff43 linux_chromiumos: require 64bit build host
I noticed that almost all the Hydra build failures were on i686. Sure
enough, upstream says that you need an x86_64 machine to build the
kernel.

(cherry picked from commit bd9737cc3e)
2016-03-27 10:29:46 +02:00
Joachim Fasting
e332f57678 accelio: mark as broken on grsec kernels
All hydra builds against grsec kernels fail.

(cherry picked from commit 8f261d717d)
2016-03-27 10:29:45 +02:00
Joachim Fasting
6a45a297b9 lttng-modules: mark as broken on grsec
All hydra builds against grsec kernels fail; seemingly because
the PaX hardening plugins are incompatible with lttng-modules
(the code writes to locations marked as read-only).

(cherry picked from commit 1939256550)
2016-03-27 10:29:45 +02:00
Joachim Fasting
207131488e rtl8812au: mark as broken on grsec kernels
All hydra builds against grsec kernels fail; builds against vanilla
kernels work.

(cherry picked from commit 2182fd52ad)
2016-03-27 10:29:45 +02:00
Joachim Fasting
ea8311a366 spl: mark as broken on grsec kernels
All hydra builds against grsec kernels fail; non-grsec kernels
succeed.

(cherry picked from commit 2a097803d4)
2016-03-27 10:29:44 +02:00
Joachim Fasting
bd58129a12 openafsClient: mark as broken on unsupported kernels
Sandboxed builds against linux 3.14 and 4.4 fail; 3.18.29 and 4.3
succeed.  From this, I conclude that 4.3 is the latest supported
version, while the lower bound is set to the oldest kernel in
nixpkgs >3.14 (the changelog does not indicate otherwise).

It appears that openafs-client is simply incompatible with grsec;
all hydra builds of openafs-client on grsec fail; local sandboxed
builds against grsec with the most recent openafs-client also fail.

(cherry picked from commit b741198116)
2016-03-27 10:29:44 +02:00
Joachim Fasting
bfece38f51 openafsClient: 1.6.14 -> 1.6.17
According to the changelog, the delta between these versions contains
fixes for several CVEs.

See https://www.openafs.org/dl/openafs/1.6.17/RELNOTES-1.6.17
and https://www.openafs.org/dl/openafs/1.6.16/RELNOTES-1.6.16
and https://www.openafs.org/dl/openafs/1.6.15/RELNOTES-1.6.15

(cherry picked from commit df0481276d)
2016-03-27 10:29:43 +02:00
Joachim Fasting
3af4a10350 hugin: add missing dependencies
https://hydra.nixos.org/build/33609995/nixlog/2/raw
(cherry picked from commit 29c3314fe4)
Signed-off-by: Domen Kožar <domen@dev.si>
2016-03-26 20:23:58 +00:00
Robert Scott
076dd96812 osrm-backend: add patch fixing build by un-hard-coding gcc-ar and gcc-ranlib paths
(cherry picked from commit 7a3e154c27)
Signed-off-by: Domen Kožar <domen@dev.si>
2016-03-26 20:12:38 +00:00
Robert Scott
939612898d osrm-backend: switch src to use fetchFromGitHub
(cherry picked from commit 0fdf7106e5)
Signed-off-by: Domen Kožar <domen@dev.si>
2016-03-26 20:12:36 +00:00
Joachim Fasting
7f7f3c1bc2 io: fix gcc5 build
c11 inline semantics breaks the build

See https://github.com/stevedekorte/io/issues/316
and https://hydra.nixos.org/build/33606216/nixlog/1/raw

(cherry picked from commit 3fe86ac582)
Signed-off-by: Domen Kožar <domen@dev.si>
2016-03-26 20:11:57 +00:00
Joachim Fasting
32f6c6be6b accelio: kernel 4.2 is the most recent supported kernel
All Hydra builds on more recent kernels fail; from reading
the accelio documentation, I get the impression that 4.2 is
the most recent supported kernel version.

(cherry picked from commit 74838cd03d)
Signed-off-by: Domen Kožar <domen@dev.si>
2016-03-26 20:11:21 +00:00
Joachim Fasting
06416457c2 jool: mark broken for kernel versions > 4.3
All hydra builds for kernel version >4.3 fail; the build failure
indicates changes to the kernel API used by the package.

(cherry picked from commit eeca73dfac)
Signed-off-by: Domen Kožar <domen@dev.si>
2016-03-26 20:11:21 +00:00
Joachim Fasting
bc393f79eb gsb: mark as broken
No active maintenance for several years; dependencies cannot be met.

(cherry picked from commit 1379baca94)
Signed-off-by: Domen Kožar <domen@dev.si>
2016-03-26 20:11:21 +00:00
Joachim Fasting
e96ce2c60e perf: fix build
https://hydra.nixos.org/build/33553564/nixlog/1/raw
(cherry picked from commit 89c6b3c11a)
Signed-off-by: Domen Kožar <domen@dev.si>
2016-03-26 20:11:21 +00:00
Joachim Fasting
be5b364581 grsecurity: fix gcc plugin
Also needs mpfr and libmpc

(cherry picked from commit 304c4a514e)
Signed-off-by: Domen Kožar <domen@dev.si>
2016-03-26 20:11:21 +00:00
Joachim Fasting
06efe48e33 udftools: fix gcc5 build
-fgnu89-inline was insufficient, revert to -std=gnu90
See https://hydra.nixos.org/build/33103604/nixlog/1/raw

Also fix various undefined reference errors by ad-hoc patching

(cherry picked from commit c13ddd14bd)
2016-03-26 18:24:00 +01:00
Joachim Fasting
04c2762461 yad: ad-hoc patching to fix undefined reference errors
(cherry picked from commit cb896a1e7f)
2016-03-26 18:24:00 +01:00
Joachim Fasting
d6ac1e6d51 yad: fix gcc5 build
https://hydra.nixos.org/build/33612450/nixlog/1/raw
(cherry picked from commit 0b060bdf68)
2016-03-26 18:23:59 +01:00
Joachim Fasting
84fbe0d6d7 zoom: fix gcc5 build
https://hydra.nixos.org/build/33122239/nixlog/1/raw
(cherry picked from commit a4ed052407)
2016-03-26 18:23:59 +01:00
Joachim Fasting
916531dfd1 leocad: work around cmath problem
introduced by recent glibc

https://hydra.nixos.org/build/33610365/nixlog/1/raw
(cherry picked from commit 6448c94e57)
2016-03-26 18:23:59 +01:00
Joachim Fasting
3047988677 curl3: mark as broken
This is an ancient version of curl that currently has 19 known vulnerabilities.
It is used by, and was added to support, only one package.

(cherry picked from commit 1f78d14028)
2016-03-26 18:23:59 +01:00
Octavian Cerna
4562338552 quagga: 0.99.24.1 -> 1.0.20160315
(cherry picked from commit c3ee17fe74)
Security update, fixes CVE-2016-2342
2016-03-26 12:59:06 +01:00
Peter Simons
6f10147e8c Synchronize Haskell package sets with master @ ce2c13675d.
The update was generated by hackage2nix from the following inputs:

  - Hackage: ab666959f0
  - LTS Haskell: 6c45757bda
  - Stackage Nightly: d8a2cae779
2016-03-26 11:27:05 +01:00
Cole Mickens
c110614936 python.pyjwt: platforms: linux -> unix 2016-03-25 23:48:09 +02:00
Pascal Wittmann
cecc1e32db eggdrop: fix build with gcc5 2016-03-25 19:56:43 +01:00
Eelco Dolstra
18fd4c1430 blender: Disable i686-linux build
The openimageio dependency doesn't build on i686. But probably nobody
cares about running Blender on 32-bit anymore.

http://hydra.nixos.org/build/33602734
(cherry picked from commit 7f61c7289f)
2016-03-25 16:45:23 +01:00
Eelco Dolstra
ab2ac03378 thunderbird: 38.6.0 -> 38.7.0
Lots of security fixes: https://www.mozilla.org/en-US/security/known-vulnerabilities/thunderbird/#thunderbird38.7

(cherry picked from commit aa6ab92d93)
2016-03-25 16:45:23 +01:00
Eelco Dolstra
970b8ee8e7 blender: 2.76b -> 2.77
(cherry picked from commit 4f47fe2f9c)
2016-03-25 16:45:23 +01:00
Eelco Dolstra
e81ca34a6a opensubdiv: 3.0.3 -> 3.0.4
(cherry picked from commit 5759b447dc)
2016-03-25 16:45:23 +01:00
Eelco Dolstra
5d391c49a9 nixpkgs-metrics: Suppress build products
(cherry picked from commit 03df731fb5)
2016-03-25 16:45:23 +01:00
Eelco Dolstra
5d24af631a Add metrics job to unstable aggregate
(cherry picked from commit c23e9e12f8)
2016-03-25 16:45:23 +01:00
Eelco Dolstra
c3fe7bed20 Keep track of Nixpkgs/NixOS evaluation statistics
(cherry picked from commit fab439201e)
2016-03-25 16:45:23 +01:00
Domen Kožar
148b740a63 nix.useChroot: allow 'relaxed' as a value
(cherry picked from commit cfc1fe345ceb77131a4f7461e28f482baf626de3)
Signed-off-by: Domen Kožar <domen@dev.si>
2016-03-25 12:48:06 +00:00
Tim Steinbach
92f1827ea0 grsecurity: 4.4.4 -> 4.4.5
(cherry picked from commit a5d8256df4)
Signed-off-by: Domen Kožar <domen@dev.si>
2016-03-25 09:59:03 +00:00
Domen Kožar
23730413fe kernel: fix build of 3.10 and 3.12 on i686 2016-03-25 09:50:25 +00:00
Domen Kožar
efc7b847e3 libgda: 5.2.2 -> 5.2.4 (fix build) 2016-03-25 09:50:25 +00:00
Joachim Fasting
49e5d4c507 bigloo: pin gcc version to 4.9
https://hydra.nixos.org/build/33120353/nixlog/1/raw
(cherry picked from commit 803b21959e)
2016-03-25 07:55:33 +01:00
Joachim Fasting
67fee4b26e hugs: fix build & meta fixups
Fix build by applying a patch from Arch Linux.
See https://hydra.nixos.org/build/33247205/log/raw

Meta fixups
- The license is actually the 3-clause BSD license.
- Use HTTPS homepage
- Adopt the package
- Convert src.sha256 to base32

(cherry picked from commit 04bcb88332)
2016-03-25 00:45:35 +01:00
Pascal Wittmann
bf92ff9657 tpm-tools: fix build by applying debians patch 2016-03-24 22:20:10 +01:00
Evgeny Egorochkin
1695966348 virtualization/azure: update the scripts for image maintenance 2016-03-24 22:38:37 +02:00
Evgeny Egorochkin
13c0d0c86e azure: package sdk and vhd tools for go 2016-03-24 22:38:28 +02:00
Evgeny Egorochkin
cb69e43ad0 virtualization/azure: reorder WALA and SSHD 2016-03-24 22:38:18 +02:00
Evgeny Egorochkin
e86c38f9b3 virtualization/azure: turn off verbose logging 2016-03-24 22:38:08 +02:00
Evgeny Egorochkin
e32412ae22 virtualization/azure: make the image dynamic again since azure-cli upload bug is fixed 2016-03-24 22:37:58 +02:00
Evgeny Egorochkin
45f34ab410 virtualization/azure: take entropy handling code out of WALA and execute it before SSHD generates the host keys 2016-03-24 22:37:47 +02:00
Cole Mickens
c938ab4dc8 virtualization/azure: fixes
azure-agent: add option for verbose logging
azure-agent: disable ssh host key regeneration
azure-common: set verbose logging on
azure-image: increase size to 30GB
2016-03-24 22:37:36 +02:00
Evgeny Egorochkin
32f0c51ab7 azure-image: provide configuration.nix which allows nixos-rebuild to build a working generation and add helpful comments 2016-03-24 22:36:40 +02:00
Pascal Wittmann
523ce20887 jbig2enc: fix build 2016-03-24 20:21:57 +01:00
Joachim Fasting
d64126b0fe cadaver: fix build against newer versions of openssl
Apply patch from Arch Linux.

See https://hydra.nixos.org/build/33258957/nixlog/1/raw

(cherry picked from commit db6ae35bd9)
2016-03-24 19:06:46 +01:00
Pascal Wittmann
273cc85e69 ispc: fix build 2016-03-24 18:12:49 +01:00
Domen Kožar
5ea3dfcc6e bootstrapped-pip: support Python 2.6
(cherry picked from commit 9c274b4bef)
Signed-off-by: Domen Kožar <domen@dev.si>
2016-03-24 15:45:07 +00:00
Adam Bell
0bac5850a2 pgadmin 1.20.0 -> 1.22.1
(cherry picked from commit 854b13dc00)

Signed-off-by: Domen Kožar <domen@dev.si>
2016-03-24 15:44:54 +00:00
Domen Kožar
6486138750 linux: 4.4.5 -> 4.4.6 2016-03-24 15:44:54 +00:00
Domen Kožar
6df636feef springlobby: 0.195 -> 0.243 (fix build) 2016-03-24 15:44:54 +00:00
Mitchell Pleune
3cac2c5fc5 iodined service: wantedBy ip-up.target
When iodined tries to start before any interface other than loopback has an IP address, it fails.
Wait for ip-up.target.

The above is because of the following:
in iodined's code: src/common.c line 157
	the flag AI_ADDRCONFIG is passed as a flag to getaddrinfo.
	Iodine uses the function

		get_addr(char *host,
			int port,
			int addr_family,
			int flags,
			struct sockaddr_storage *out);

	to get address information via getaddrinfo().

	Within get_addr, the flag AI_ADDRCONFIG is forced.

	What this flag does is cause getaddrinfo to return
	"Name or service not known" as an error explicitly if no IP address
	has been assigned to the computer.
	see getaddrinfo(3)

Wait for an IP address before starting iodined.
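A minimal C sketch (hypothetical, not iodine source) of the behaviour described above: with AI_ADDRCONFIG set, getaddrinfo fails when the machine has no suitable address configured yet.

    /* addrconfig-demo.c -- hypothetical example, not iodine source */
    #include <stdio.h>
    #include <string.h>
    #include <sys/types.h>
    #include <sys/socket.h>
    #include <netdb.h>

    int main(void) {
        struct addrinfo hints, *res;
        memset(&hints, 0, sizeof(hints));
        hints.ai_family = AF_UNSPEC;
        hints.ai_flags  = AI_ADDRCONFIG;  /* only ask for families with a configured address */

        int err = getaddrinfo("example.com", "53", &hints, &res);
        if (err != 0) {
            /* Before any interface has an address, this reports
               "Name or service not known" instead of resolving. */
            fprintf(stderr, "getaddrinfo: %s\n", gai_strerror(err));
            return 1;
        }
        freeaddrinfo(res);
        return 0;
    }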

(cherry picked from commit 927aaecbcb)
2016-03-24 14:24:00 +01:00
Lluís Batlle i Rossell
9fb09319a3 octave: update to 4.0.1
bugfix release, they say.

(cherry picked from commit 1f8ffdde5b)
2016-03-24 13:55:49 +01:00
Lluís Batlle i Rossell
b522266b77 octave: parallel building and check.
I think those were not updated since 3.8.2. They worked for me. Let's see.

(cherry picked from commit 74ccfd690d)
2016-03-24 13:55:49 +01:00
Eelco Dolstra
4be705574a Fix 16.03 version number
The 77900 delta does not correspond to the 16.03-beta tag, so git
describe gives a different version than the NixOS version.
2016-03-24 13:48:16 +01:00
Lluís Batlle i Rossell
b314616134 Setting gfortran to gcc5. octave was crashing.
Otherwise, using imread() in octave threw:

/nix/store/4fvwfzwg58d7167an550xm1k6m7px443-octave-4.0.0/lib/octave/4.0.0/oct/x86_64-unknown-linux-gnu/__magick_read__.oct: failed to load: /nix/store/w7xr6frwffrl135v7vpxdwmnx8l95j5m-gfortran-4.9.3/lib/libstdc++.so.6: version `GLIBCXX_3.4.21' not found (required by /nix/store/qlxkin1arzwbcpiny6amn8747wp8ndg7-graphicsmagick-1.3.21/lib/libGraphicsMagick++.so.11)

(this is from 16.03, although I push this to staging)

(cherry picked from commit a9d14e3452)
2016-03-24 10:58:32 +01:00
Eelco Dolstra
4be870bad3 stdenv-darwin: Fix dependency on bootstrapTools
Commit 2040a9ac57 changed the order of
$PATH elements, causing initialPath to appear after buildInputs. Thus
gnugrep ended up depending on bin/sh from bootstrapTools, rather than
from pkgs.bash. The fix is to provide pkgs.bash via buildInputs rather
than initialPath.

http://hydra.nixos.org/build/33276697
(cherry picked from commit 7fc24dfd21)
Signed-off-by: Domen Kožar <domen@dev.si>
2016-03-24 09:04:17 +00:00
Vladimír Čunát
52604ad28d ensureNewerSourcesHook: fix problems with symlinks
Fixes #14043. Now symlinks themselves are touched instead of their
targets.

(cherry picked from commit ff60350eb9)
Signed-off-by: Domen Kožar <domen@dev.si>
2016-03-24 09:04:17 +00:00
Tuomas Tynkkynen
97a7b595ce vc: Broken on i686
http://hydra.nixos.org/build/33122230/nixlog/1/raw =>

CMake Error at CMakeLists.txt:163 (message):
  Unsupported target architecture 'i686'.  No support_???.cpp file exists for
  this architecture.

(cherry picked from commit c58c1f3b50)
2016-03-24 02:23:48 +02:00
Nikolay Amiantov
99d1e66c4d elmPackages.elm-compiler: use old language-ecmascript
(cherry picked from commit f0187cb4c3)
Signed-off-by: Domen Kožar <domen@dev.si>
2016-03-23 21:49:28 +00:00
Vladimír Čunát
76c266017d zopfli: disable parallel building
It was failing often, e.g.:
https://hydra.nixos.org/build/32101335/nixlog/1/raw

(cherry picked from commit b336ed89e6)
2016-03-23 20:21:18 +01:00
Graham Christensen
1a3edcdbda zam-plugins: sha256 changed
(cherry picked from commit 587ae0f63f)
2016-03-23 17:44:08 +01:00
Graham Christensen
796efbab1c vacuum: port to mkDerivation, add zlib for hydra failure
(cherry picked from commit aba56e7f59)
Signed-off-by: Domen Kožar <domen@dev.si>
2016-03-23 16:06:59 +00:00
Graham Christensen
dc670e38fb codeblocks: add libX11 build dependency to fix hydra build
http://hydra.nixos.org/build/33296816/nixlog/1
(cherry picked from commit 3f6023dd16)
Signed-off-by: Domen Kožar <domen@dev.si>
2016-03-23 15:42:44 +00:00
Michael Raskin
6abce9522c gcl: gcc5 build: enforce old inline semantics for now; will be fixed in the next upstream release
(cherry picked from commit 9ed00ff086)
Signed-off-by: Domen Kožar <domen@dev.si>
2016-03-23 15:41:45 +00:00
Joachim Fasting
9687ed9046 gcl: fix gcc5 build
Apply patch from Gentoo

(cherry picked from commit 27eac5313e)
Signed-off-by: Domen Kožar <domen@dev.si>
2016-03-23 15:41:40 +00:00
Joachim Fasting
34455365b8 clisp: fix i686 build
Requires -falign-functions=4

See https://hydra.nixos.org/build/33256640/nixlog/1/raw

(cherry picked from commit 1972c5aa17)
Signed-off-by: Domen Kožar <domen@dev.si>
2016-03-23 15:41:33 +00:00
Domen Kožar
b5d47a8cfe openimageio: 1.6.9 -> 1.6.11 (fixes build on i686) 2016-03-23 15:41:28 +00:00
Joachim Fasting
26a5e637b7 clementineFree: fix gcc5 build
Uses gcc switches that are no longer valid. Also strip
-Werror for good measure. See
https://hydra.nixos.org/build/33277865/nixlog/1/raw

(cherry picked from commit 72bcff71fe)
Signed-off-by: Domen Kožar <domen@dev.si>
2016-03-23 15:41:28 +00:00
Domen Kožar
58153f164d Merge pull request #14160 from ttuegel/release-16.03
[release-16.03] KDE 5 fixes
2016-03-23 15:31:05 +00:00
Thomas Tuegel
cb9f989b18 kde5.l10n.sr: patch shebangs 2016-03-23 08:44:49 -05:00
Thomas Tuegel
e81ee2be29 kde5.l10n.nl: re-enable 2016-03-23 08:44:35 -05:00
Thomas Tuegel
3ff6d0492a kwin: allow CMake to set RPATH during build 2016-03-23 08:44:15 -05:00
Thomas Tuegel
b8db5897c7 calamares: mark broken 2016-03-23 08:44:05 -05:00
Ambroz Bizjak
d1afa1b0d1 opencsg: Fix build related to missing libX11 linking the example.
(cherry picked from commit 853d612c3f)
Signed-off-by: Domen Kožar <domen@dev.si>
2016-03-23 12:58:23 +00:00
Graham Christensen
edca647059 slic3r: Add ModuleBuild to BuildInputs, due to http://hydra.nixos.org/build/33298227/nixlog/1
(cherry picked from commit 29cf3ecd78)
Signed-off-by: Domen Kožar <domen@dev.si>
2016-03-23 12:54:15 +00:00
Octavian Cerna
257e0f78b0 hhvm: 3.6.0 -> 3.12.1
(cherry picked from commit 524310d29e)
Signed-off-by: Domen Kožar <domen@dev.si>
2016-03-23 12:50:50 +00:00
Joachim Fasting
ae5901f97c arangodb: fix gcc5 build
https://hydra.nixos.org/build/33263863/nixlog/1/raw
(cherry picked from commit 037e815787)
Signed-off-by: Domen Kožar <domen@dev.si>
2016-03-23 12:39:37 +00:00
Ludovic Courtès
7a1b8a3bdf Remove Guix.
(cherry picked from commit 5dab370d77)
Signed-off-by: Domen Kožar <domen@dev.si>
2016-03-23 12:39:25 +00:00
Domen Kožar
709a2fd0c5 sync uids with master 2016-03-23 12:17:53 +00:00
Domen Kožar
0858ece1ad Pin hydra-www and hydra-queue-runner uids
The hydra user is already pinned; this is needed due to
https://github.com/NixOS/nixpkgs/issues/14148
2016-03-23 12:15:29 +00:00
Tim Steinbach
ac79602d7d kernel: 3.14.63 -> 3.14.65
(cherry picked from commit 4274edbe40)
Signed-off-by: Domen Kožar <domen@dev.si>
2016-03-23 11:33:20 +00:00
Tim Steinbach
80e93efbc9 kernel: 3.12.55 -> 3.12.57
(cherry picked from commit bf41deb889)
Signed-off-by: Domen Kožar <domen@dev.si>
2016-03-23 11:33:12 +00:00
Tim Steinbach
fcb270e5f5 kernel: 3.10.99 -> 3.10.101
(cherry picked from commit 6f5f855a2e)
Signed-off-by: Domen Kožar <domen@dev.si>
2016-03-23 11:33:00 +00:00
Charles Strahan
d664e30a14 go-1.6: fix the build
One of the test scripts dynamically creates and executes a bash script,
which attempts to use `/usr/bin/env bash`. This patches the file to
use the stdenv's shell instead.

Otherwise, the only way this could have worked was by building go_1_6
outside of the sandbox.

(cherry picked from commit 0547fd247f)
2016-03-23 00:28:44 -04:00
Joachim Fasting
531baf82ad cataclysm-dda: build recipe enhancements & gcc5 support
- Remove redundant platform check; meta.platforms is sufficient
- Use postPatch rather than override patchPhase entirely
- Strip -Werror
- Move build-time only dependencies to nativeBuildInputs

This also fixes the gcc5 build, which fails due to a deprecated-declarations
warning (see https://hydra.nixos.org/build/33117020/nixlog/2/raw).
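A minimal C illustration (hypothetical, not cataclysm-dda code) of why stripping -Werror helps here: -Wdeprecated-declarations is on by default, so under -Werror a single call to a deprecated function becomes a hard build failure.

    /* deprecated-demo.c -- hypothetical example */
    __attribute__((deprecated("use new_api() instead")))
    static int old_api(void) { return 0; }

    int main(void) { return old_api(); }

    /* gcc -Werror deprecated-demo.c -> error: 'old_api' is deprecated
       gcc deprecated-demo.c         -> builds, with a warning only    */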

(cherry picked from commit 18b35bd741)
2016-03-23 01:44:55 +01:00
Joachim Fasting
77eaab7d6c criu: fix build
Would fail due to -Werror; see
https://hydra.nixos.org/build/33217086/nixlog/2/raw

(cherry picked from commit 3e1ec2b663)
2016-03-23 00:33:52 +01:00
Domen Kožar
ca6ac920ed fix perl modules for i3 2016-03-22 23:13:40 +00:00
Domen Kožar
4452a68425 remove erlangR15 and riak 1.3.0 as they're outdated 2016-03-22 21:39:38 +00:00
Kevin Cox
723989b6c4 mesos: Patch more executable paths.
(cherry picked from commit 8b7adf808e)
Signed-off-by: Domen Kožar <domen@dev.si>
2016-03-22 21:28:27 +00:00
Kevin Cox
7e0c19c1a2 Mesos: 26.0 -> 27.1
(cherry picked from commit 2843d83905)
Signed-off-by: Domen Kožar <domen@dev.si>
2016-03-22 21:28:11 +00:00
Domen Kožar
f115f87bb5 nix-exec: use stable Nix
(cherry picked from commit 59ba0fb295)
Signed-off-by: Domen Kožar <domen@dev.si>
2016-03-22 21:24:34 +00:00
Domen Kožar
15761b0520 flow: 0.18.1 -> 0.22.1 2016-03-22 21:21:53 +00:00
Domen Kožar
5d21d6a8a6 perl: bind some CGI 2016-03-22 21:18:37 +00:00
Domen Kožar
0e41e8bbf0 xen: use gcc49 2016-03-22 21:04:30 +00:00
Domen Kožar
b4d6442660 cmis: fix build with gcc5 2016-03-22 20:59:25 +00:00
Pascal Wittmann
637e3b0b43 mailutils: fix build by using gcc49 2016-03-22 20:52:49 +01:00
Pascal Wittmann
5cdd170af6 zynaddsubfx: fix build
see #13559
2016-03-22 19:09:31 +01:00
Pascal Wittmann
d82e740f9b itk: pin gcc version to 4.9 2016-03-22 17:58:58 +01:00
Joachim Fasting
d5a73df00b archimedes: pin gcc to version 4.9
https://hydra.nixos.org/build/33241446/nixlog/1/raw
(cherry picked from commit 92261129ea)
2016-03-22 15:43:45 +01:00
Joachim Fasting
e10b2fbfb5 afterstep: pin gcc to version 4.9
See https://hydra.nixos.org/build/33238876/nixlog/1/raw

(cherry picked from commit aa4d438107)
2016-03-22 15:43:44 +01:00
Lluís Batlle i Rossell
c431588cd2 Making ffmpeg friendly for arm. 2016-03-22 15:01:18 +01:00
Lluís Batlle i Rossell
d1a8d192a5 Update linux raspberry-pi to 4.1.y.
I could boot it in pi2; I don't know if I needed new
firmware files in /boot.
2016-03-22 15:01:18 +01:00
Lluís Batlle i Rossell
b08b468c2b Fixing vxl build with gcc 5. 2016-03-22 14:42:35 +01:00
Domen Kožar
2e53a8dd7f perl-Net-SMTP-SSL: 1.01 -> 1.03 2016-03-21 21:46:30 +00:00
Pascal Wittmann
a699a188b7 bviplus: fix build
See #13559
2016-03-21 21:54:42 +01:00
Pascal Wittmann
2f666306aa atftp: fix build by using gcc49
See #13559
2016-03-21 21:46:14 +01:00
Pascal Wittmann
98c5bb1603 avarice: fix build by using gcc49
See #13559
2016-03-21 21:40:40 +01:00
Pascal Wittmann
a33efc99b4 amuleGui: fix build
See #13559
2016-03-21 21:29:58 +01:00
Franz Pletz
15d42c3605 nss: 3.22.2 -> 3.23 (security)
Fixes CVE-2016-1950.

See: https://developer.mozilla.org/en-US/docs/Mozilla/Projects/NSS/NSS_3.23_release_notes

(cherry picked from commit 1a9b272c09)
2016-03-21 21:29:24 +01:00
Joachim Fasting
2746f3a854 haka: fix build on gcc5
See https://hydra.nixos.org/build/33296799/nixlog/1/raw

(cherry picked from commit c1901038c8)
2016-03-21 19:12:36 +01:00
Graham Christensen
01f2389e2d squid: 3.5.1 -> 3.5.15 for CVE-2016-2571
(cherry picked from commit 75c90fff39)
2016-03-21 18:49:19 +01:00
aszlig
8c2fa806b8 google-chrome: Fix fetching upstream binary
Commit aa097946d2 only fixed evaluation.

Ssince 37dbd62 however, the fetchurl call is already implied so just
changing the path will still result in fetchurl (fetchurl ...), so let's
drop the outer fetchurl.

Signed-off-by: aszlig <aszlig@redmoonstudios.org>
Cc: @msteen, @benley
(cherry picked from commit 4d305102e0)
2016-03-21 16:18:35 +01:00
Tim Steinbach
dc3073b52f kernel: 3.18.27 -> 3.18.29 (close #14057)
(cherry picked from commit 6476075ccf)
2016-03-21 12:42:52 +01:00
Tim Steinbach
12329a24de kernel: 4.1.17 -> 4.1.20 (close #14058)
(cherry picked from commit 379709b404)
2016-03-21 12:42:32 +01:00
Vladimír Čunát
5c04d08990 chrome: fix evaluation after 6041cfe2af
(cherry picked from commit aa097946d2)
2016-03-21 12:05:49 +01:00
Vladimír Čunát
4515fe3934 Merge #14075: backport php security updates 2016-03-21 11:59:57 +01:00
aszlig
4b054488bd nixos/tests/chromium: Propagate "system" to pkgs
Assigning the channelMap by the function attrset argument at the
top-level of the test expression file may reference a different
architecture than we need for the tests.

So if we get the pkgs attribute by auto-calling, this will lead to test
failure because we have a different architecture for the test than for
the browser.

Signed-off-by: aszlig <aszlig@redmoonstudios.org>
(cherry picked from commit e047d79279)
2016-03-21 04:21:29 +01:00
aszlig
274a7cf7ee chromium: Fix comment of upstream-info.nix
As of 6041cfe, the upstream-info.nix (back then it was called
sources.nix) is no longer in the source/ subdirectory, so we need to fix
that comment to say that the file is autogenerated from update.sh in the
*same* directory.

Signed-off-by: aszlig <aszlig@redmoonstudios.org>
(cherry picked from commit 5ebd629c6f)
2016-03-21 04:21:29 +01:00
aszlig
b7aadc752e nixos/tests/chromium: Allow overriding channel map
This has been the case before e45c211, but it turns out that it's very
useful to override the channel packages so we can run tests with
different Chromium build options.

Signed-off-by: aszlig <aszlig@redmoonstudios.org>
(cherry picked from commit 3bd71b135b)
2016-03-21 04:21:28 +01:00
aszlig
6f4d27f60f chromium: Revert working around --sysroot filter
This reverts commit 5979946c41.

I have tested this by building against the stable version of Chromium
and it seems to compile just fine, so it doesn't seem to be needed
anymore.

Signed-off-by: aszlig <aszlig@redmoonstudios.org>
(cherry picked from commit fb65a0048a)
2016-03-21 04:21:27 +01:00
aszlig
5e3cbd4856 chromium: Show status about precompiling .py files
Only an aesthetics thingy, but it also corrects the comment, because we're
essentially precompiling .py files, NOT the .pyc files (the latter are
the results).

Signed-off-by: aszlig <aszlig@redmoonstudios.org>
(cherry picked from commit 1f497204f7)
2016-03-21 04:21:26 +01:00
aszlig
7dcc25befe chromium: Move source/default.nix into common.nix
This addresses #12794 so that we now have only a single tarball where we
base our build on instead of splitting the source into different outputs
first and then reference the outputs.

The reason I did this in the first place is that we previously built the
sandbox as a different derivation and unpacking the whole source tree
just for building the sandbox was a bit too much.

As we now have namespaces sandbox built in by default we no longer have
that derivation anymore. It still might come up however if we want to
build NaCl as a separate derivation (see #8560), but splitting the
source code into things only NaCl might require is already too much work
and doesn't outweigh the benefits.

Another issue with the source splitup is that Hydra now has an output
limit for non-fixed-output derivations which we're already hitting.

Tested the build against the stable channel and it went well, but I
haven't tested running the browser.

Signed-off-by: aszlig <aszlig@redmoonstudios.org>
(cherry picked from commit 4f981b4f84)
2016-03-21 04:21:25 +01:00
aszlig
a7fea3ef54 chromium: Move fetchurl calls to getChannel
We always do something like "fetchurl channelProduct", so let's move it
to getChannel directly so we can avoid those fetchurl calls all over the
place.

Also, we can still access subattributes from the fetchurl call if we
need to, so there really is no need to expose the product's attributes
directly.

Signed-off-by: aszlig <aszlig@redmoonstudios.org>
(cherry picked from commit 37dbd62a83)
2016-03-21 04:21:24 +01:00
aszlig
6334932eb7 chromium/plugins: Break long line
Yes, I know I'm a bit nitpicky, but lines >80 chars are very ugly if you
have two windows side-by-side.

Thus no feature changes here.

Signed-off-by: aszlig <aszlig@redmoonstudios.org>
(cherry picked from commit 4984a2bf76)
2016-03-21 04:21:24 +01:00
aszlig
bfec68ed10 chromium/common.nix: Remove unreferenced attrs
We're going to refactor things anyway, so let's first get rid of
everything that's not used anymore.

Signed-off-by: aszlig <aszlig@redmoonstudios.org>
(cherry picked from commit 985df3900d)
2016-03-21 04:21:23 +01:00
aszlig
5d47e25dc3 chromium/source: Move update.nix to parent dir
We now should have only the default.nix left in the source directory and
we can start to factor out the pieces into the Chromium main derivation
attributes.

Signed-off-by: aszlig <aszlig@redmoonstudios.org>
(cherry picked from commit 6041cfe2af)
2016-03-21 04:21:22 +01:00
aszlig
8f211997eb chromium: Rename sources.nix to upstream-info.nix
The "sources.nix" also contains information about where to get binary
packages, so calling it "upstream-info.nix" fits better in terms of
naming.

Also, we're moving it away from the sources dir, because the latter will
soon vanish.

Signed-off-by: aszlig <aszlig@redmoonstudios.org>
(cherry picked from commit 2d9a604907)
2016-03-21 04:21:21 +01:00
aszlig
3c72d75922 chromium/source: Move patches into its own subdir
We're going to reference the patches in the Chromium main build rather
than applying it to the sources. So as a first step, this should keep
the patches away from the "source" subdirectory so we can make it flat.

Signed-off-by: aszlig <aszlig@redmoonstudios.org>
(cherry picked from commit d6b11ed722)
2016-03-21 04:21:20 +01:00
Franz Pletz
d1b7c0ea70 bind: 9.10.3 -> 9.10.3-P4 (security)
Fixes:

  * CVE-2016-1285: https://kb.isc.org/article/AA-01352/
  * CVE-2016-1286: https://kb.isc.org/article/AA-01353/

(cherry picked from commit 404a699a20)
2016-03-21 04:05:22 +01:00
Franz Pletz
51c88123b4 lxc: Add patch to fix bash completion
Fixes #9616.

(cherry picked from commit b33453bd98)
2016-03-21 03:10:52 +01:00
Franz Pletz
b79b512ef0 lxc: 1.1.4 -> 1.1.5
(cherry picked from commit 4d0d1124ae)
2016-03-21 03:10:50 +01:00
Peter Simons
cc86145ed5 Merge pull request #14082 from neilmayhew/fix/hardlink
hardlink: Fix Mac build error
2016-03-20 17:25:47 +01:00
Joachim Fasting
23445ad23e ldm: fix build
There were two problems:
- because buildPhase is specified directly, preBuild ends up never being
  executed; and
- the source is missing a header, resulting in an undefined reference error

(cherry picked from commit f59998055b)
2016-03-20 16:59:19 +01:00
Joachim Fasting
8e34256a80 nixos: disable the clfswm window manager module
(cherry picked from commit e891e50946)
2016-03-20 16:59:18 +01:00
Joachim Fasting
e1bb0cdf4a clfswm: mark as broken
This package has been broken for quite a long time.  I attempted
to fix it, to no avail.

See also: https://hydra.nixos.org/build/33498133/nixlog/2/raw

(cherry picked from commit 9ae0e6633e)
2016-03-20 16:59:18 +01:00
Joachim Fasting
5d98a082e5 lp_solve: mark as broken
See https://hydra.nixos.org/build/32393768/log/raw

(cherry picked from commit 649c2cd027)
2016-03-20 16:59:18 +01:00
Sander van der Burg
8cd4e77209 dysnomia: bump to version 0.5.1
(cherry picked from commit 7ed3dc6cfc)
2016-03-20 15:23:31 +00:00
Greyson
c7eba63f29 unifi: 4.7.6 -> 4.8.14
Includes upstream URL relocation.

(cherry picked from commit e379e4aa38)

Old tarball is missing.
2016-03-20 05:58:52 +01:00
Neil Mayhew
0fd813b299 hardlink: Fix Mac build error
Use $CC instead of hardcoded gcc which isn't used on Mac
2016-03-19 21:41:27 -06:00
Lluís Batlle i Rossell
50a2e74991 Missing path in prev commit
(cherry picked from commit 9b8b143c99)
2016-03-20 00:47:40 +01:00
Lluís Batlle i Rossell
34c3b3f27f Fixing icu for ARM with a patch from openembedded.
It failed with an "internal error".

(cherry picked from commit e6e7de082d)
2016-03-20 00:43:02 +01:00
Joachim Fasting
8dfd8b83d4 nano: 2.5.0 -> 2.5.3
Also move build-time dependencies to nativeBuildInputs

(cherry picked from commit d43578b599)
2016-03-19 22:56:14 +01:00
Joachim Fasting
413c01a0f8 ipxe: fix gcc5 build
gcc5 enables additional warnings, causing the build to fail with
-Werror.  The build could be fixed by specifically disabling errors
for `discarded-array-qualifiers` and `logical-not-parentheses` warnings,
but simply passing -Wno-error is more future-proof.

See https://hydra.nixos.org/build/33274006/nixlog/1/raw

(cherry picked from commit 8dfb8d06f0)
2016-03-19 22:47:29 +01:00
lukasepple
7cec20c775 torbrowser: 5.5.3 -> 5.5.4
(cherry picked from commit 1dbf51416a)
2016-03-19 22:44:55 +01:00
Franz Pletz
fe3d97ea6b pythonPackages.libvirt: 1.3.0 -> 1.3.2
(cherry picked from commit b887b16a2c)
2016-03-19 17:37:01 +01:00
Franz Pletz
c5e6538869 pythonPackages.searx: 0.7.0 -> 0.8.1
The old version was broken.

(cherry picked from commit 87012e7321)
2016-03-19 16:56:37 +01:00
Franz Pletz
39a87f331c libvirt: Add fpletz as maintainer
(cherry picked from commit e312a34775)
2016-03-19 16:56:37 +01:00
Franz Pletz
2e5f582da8 libvirt: 1.3.0 -> 1.3.2
Fixes CVE-2015-5313.

(cherry picked from commit ff0cfc160f)
2016-03-19 16:56:37 +01:00
Peter Simons
89284c21bc wrap-gapps-hook.sh: fix double inclusion guard
The simple "return" would not override the non-zero error code set by the
preceding test command, therefore aborting scripts running with "set -e".

(cherry picked from commit af81505c00)
2016-03-19 15:58:35 +01:00
Peter Simons
cbcfaaa006 gnupg-2.1: add myself as maintainer plus minor cosmetic
(cherry picked from commit ab450f8477)
2016-03-19 15:58:12 +01:00
Peter Simons
9f75d283c1 gnupg-2.1: drop unnecessary autoreconf hook from build
This change also prevents gnupg 2.1 from considering itself as an
unstable development version, which it is not.

(cherry picked from commit eadf39a16c)
2016-03-19 15:58:01 +01:00
Yann Hodique
eacd290c33 git: 2.7.3 -> 2.7.4
(cherry picked from commit 31c317e09e)
2016-03-19 15:56:41 +01:00
Robert Scott
b07d941c8e php: 5.6.18 -> 5.6.19 (security update) 2016-03-19 13:31:14 +00:00
Robert Scott
a572a2f291 php: 5.5.32 -> 5.5.33 (security update) 2016-03-19 13:31:14 +00:00
Pierre Dal-Pra
f79a9ca3c0 docker: 1.10.0 -> 1.10.3
(cherry picked from commit d97805ccd0)
2016-03-19 12:50:21 +01:00
Joachim Fasting
ef8bb2e6e5 nixos/tests: fix docker test
The docker service is socket activated by default; thus,
`waitForUnit("docker.service")` before any docker command causes the
unit test to time out.

Instead, do `waitForUnit("sockets.target")` to ensure that sockets are
setup before running docker commands.

(cherry picked from commit ece457c62f)
2016-03-19 12:50:21 +01:00
Joachim Fasting
5dc086df29 flexcpp: patch all shebangs & fix installation
Using the original build recipe would result in an output
without the actual flexcpp binary.

(cherry picked from commit a8556bd5d7)
2016-03-19 12:50:21 +01:00
Joachim Fasting
eaeb0d8073 flexcpp: 2.03.00 -> 2.04.00
(cherry picked from commit dd177e62e3)
2016-03-19 12:50:21 +01:00
Joachim Fasting
684a291c66 bobcat: fix installation
Another hotfix for eae059b0b6
(I kind of jumped the gun on this one ...)

The `build install` command takes a positional argument
indicating which components to install; without it, nothing
is installed and the build fails to create the store output.

(cherry picked from commit 8e359b2e21)
2016-03-19 12:50:21 +01:00
Joachim Fasting
28625e932f bobcat: patch all shebangs
Hotfix for eae059b0b6

Not really a regression, but it turns out that the man page
target requires shebang patching as well.

(cherry picked from commit 3704901dc8)
2016-03-19 12:50:21 +01:00
Joachim Fasting
a0e49374df bobcat: 4.00.00 -> 4.01.04
Appears to fix https://hydra.nixos.org/build/33157535/nixlog/1/raw

(cherry picked from commit eae059b0b6)
2016-03-19 12:50:21 +01:00
Joachim Fasting
8be69850ab manual: fix meta.description in ruby example expression
Noted by @namore on github

(cherry picked from commit 49dc7e2d61)
2016-03-19 12:50:21 +01:00
Lluís Batlle i Rossell
d1cd035a3b Updating ts to 0.7.6.
(cherry picked from commit 41c05b47a0)
2016-03-19 12:24:27 +01:00
Sander van der Burg
38b79d7686 fetchbower: quote parameter to prevent ambiguous redirects if version specifiers have wildcards
(cherry picked from commit 27e23486bb)
2016-03-18 12:06:37 +00:00
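A small shell illustration of the problem (variable names are hypothetical, not fetchbower's actual builder): when a version specifier containing wildcards or spaces is expanded unquoted as a redirect target, bash can fail with "ambiguous redirect".

    version='~1.0.* || >=2'
    echo fetched >  $out/$version     # may fail: ambiguous redirect
    echo fetched > "$out/$version"    # quoted: one literal target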
Peter Simons
d505f470d3 nixos/tests/firewall.nix: ping now succeeds in the firewall's default configuration
(cherry picked from commit c523aeffde)
2016-03-18 11:44:50 +01:00
Mathias Schreck
06115a3907 nodejs: 5.8.0 -> 5.9.0
(cherry picked from commit cd38a38316)
2016-03-18 10:10:15 +00:00
Peter Simons
1a07877b5c Set networking.firewall.allowPing = true by default.
This patch fixes https://github.com/NixOS/nixpkgs/issues/12927.

It would be great to configure good rate-limiting defaults for this via
/proc/sys/net/ipv4/icmp_ratelimit and /proc/sys/net/ipv6/icmp/ratelimit,
too, but I didn't since I don't know what a "good default" would be.

(cherry picked from commit a0ab4587b7)
2016-03-18 08:25:35 +01:00
Peter Simons
cfc1b69bed ghcjs: synchronize state with master @ daa03b0229 2016-03-18 08:23:31 +01:00
Peter Simons
014223bd00 cabal2nix: synchronize state with master @ daa03b0229 2016-03-18 08:23:31 +01:00
Peter Simons
26b6957daf Haskell: synchronize state with master @ daa03b0229 2016-03-18 08:23:30 +01:00
Tuomas Tynkkynen
fd4cdf183a ARM stdenv: Update bootstrap tools
For some reason, the current bootstrap tools fail to build gettext:

init2.c:37: MPFR assertion failed: (64 - 0) == ((64 - 0)/8) * 8 && sizeof(mp_limb_t) == ((64 - 0)/8)
libxml/xpath.c: In function 'xmlXPathCompPathExpr':
libxml/xpath.c:10627:1: internal compiler error: Aborted
 xmlXPathCompPathExpr(xmlXPathParserContextPtr ctxt) {
 ^
Please submit a full bug report,
with preprocessed source if appropriate.
See <http://gcc.gnu.org/bugs.html> for instructions.
make[5]: *** [libxml/libxml_rpl_la-xpath.lo] Error 1

I didn't investigate why this is the case but rebuilding the bootstrap
tools seems to help.

I used this old-ish WIP branch https://github.com/dezgeg/nixpkgs/commits/arm-bootstrap
since latest master has even more problems with cross-compiling anything.
(I will eventually push this stuff and make the ARM bootstraps build on hydra.)

(cherry picked from commit cdef1cdd38)
2016-03-18 03:17:45 +02:00
Robert Helgesson
8e11767d02 perl-Hook-LexWrap: 0.24 -> 0.25
Also add meta section.

(cherry picked from commit b9f7bb15e5)
2016-03-17 22:35:53 +01:00
Tanner Doshier
f6bec34f6f tarsnap: 1.0.36.1 -> 1.0.37
(cherry picked from commit ab1008014d)
2016-03-17 15:08:06 +01:00
Joachim Fasting
f95d5d0e86 dnscrypt-proxy service: documentation fixes
- fix `enable` option description
  using `mkEnableOption longDescription` is incorrect; override
  `description` instead
- additional details for proper usage of the service, including
  an example of the recommended configuration
- clarify `localAddress` option description
- clarify `localPort` option description
- clarify `customResolver` option description

(cherry picked from commit a0663e3709)
2016-03-17 15:07:44 +01:00
Joachim Fasting
d12bc4ffa1 electrum: 2.6.1 -> 2.6.2
(cherry picked from commit 1b3d974c98)
2016-03-17 15:06:12 +01:00
Joachim Fasting
9379081695 electrum: 2.5.4 -> 2.6.1
(cherry picked from commit 1ff8a6b6c4)
2016-03-17 15:06:11 +01:00
Domen Kožar
12c908ec5b nixUnstable: point to latest 1.12pre
(cherry picked from commit 8e398a88a1)
Signed-off-by: Domen Kožar <domen@dev.si>
2016-03-17 13:03:50 +00:00
Domen Kožar
baf7f98b45 pythonPackages.cryptography: 1.1.1 -> 1.2.3 (fix openssl build) 2016-03-17 13:01:23 +00:00
Domen Kožar
00793f0756 nghttp2: fix url
(cherry picked from commit 826eeec841)
Signed-off-by: Domen Kožar <domen@dev.si>
2016-03-17 10:56:46 +00:00
Vladimír Čunát
3fb27d83ab firefox: disable optimization hack (i686-linux)
It seems to build fine even without it, so the original reason doesn't
hold anymore:
https://github.com/NixOS/nixpkgs/commit/f4b5671b0d9e8904a4ad6b3fd85268

(cherry picked from commit 9be0c7d463)
2016-03-17 09:28:04 +01:00
Franz Pletz
c29ebc60f5 gitlab service: Remove emailFrom option
Not being used anymore. Use `services.gitlab.extraConfig.gitlab.email_from`
instead.

(cherry picked from commit 38579a1cc9)
2016-03-17 04:17:55 +01:00
Franz Pletz
72a9c3c018 gitlab: 8.5.5 -> 8.5.7
(cherry picked from commit 1cd99b1a48)
2016-03-17 03:39:48 +01:00
Peter Simons
88a20de886 git: update from version 2.7.1 to 2.7.3 2016-03-16 21:37:31 +01:00
Vladimír Čunát
066d56507e Revert "nvidia: 358.16 -> 361.28" to fix #13942
This reverts commit e0fe8cf204.
Before updating we need to fix up problems related to the GLVND transition.

(cherry picked from commit a1de225913)
2016-03-16 20:09:04 +01:00
Sander van der Burg
2e4c131749 disnixos: bump to version 0.4.1
(cherry picked from commit 0f46200f26)
2016-03-16 13:29:38 +00:00
Nikolay Amiantov
e4662de8c8 cups service: fix gutenprint update when there's no printers
(cherry picked from commit 851af5e888)
2016-03-15 21:47:18 +03:00
Eelco Dolstra
1010ced00c Remove setting non-existent sysctl options 2016-03-15 17:42:12 +01:00
Eelco Dolstra
ac18b492d5 NixOS release: Don't depend on chromium
This is failing because it exceeds the hydra-queue-runner size limit.

http://hydra.nixos.org/build/33303819
(cherry picked from commit 3135af2511)
Signed-off-by: Domen Kožar <domen@dev.si>
2016-03-15 15:20:05 +00:00
Eelco Dolstra
5f2226ddc7 Fix NixOS tested job
(cherry picked from commit 55e71f45cc)
Signed-off-by: Domen Kožar <domen@dev.si>
2016-03-15 15:19:56 +00:00
Eelco Dolstra
4066f15504 Build most ISOs/OVAs for x86_64-linux only
Probably not many people care about i686-linux any more, but building
all these images is fairly expensive (e.g. in the worst case, every
Nixpkgs commit would trigger a few gigabytes of uploads to S3).

(cherry picked from commit daa093bf3c)
Signed-off-by: Domen Kožar <domen@dev.si>
2016-03-15 15:19:37 +00:00
Eelco Dolstra
d24e4eef6c Combine ISO generation steps
This folds adding hydra-build-products into the actual ISO generation,
preventing an unnecessary download of the ISO.

(cherry picked from commit 10293b87a9)
Signed-off-by: Domen Kožar <domen@dev.si>
2016-03-15 15:18:30 +00:00
Eelco Dolstra
fa4b560533 Combine OVA generation steps
Previously this was done in three derivations (one to build the raw
disk image, one to convert to OVA, one to add a hydra-build-products
file). Now it's done in one step to reduce the amount of copying
to/from S3. In particular, not uploading the raw disk image prevents
us from hitting hydra-queue-runner's size limit of 2 GiB.

(cherry picked from commit 5cc7bcda30)
Signed-off-by: Domen Kožar <domen@dev.si>
2016-03-15 15:18:25 +00:00
Joachim Fasting
e3153cc3a5 torbrowser: 5.5.2 -> 5.5.3
(cherry picked from commit 665e79324e)
2016-03-15 14:55:47 +01:00
Aneesh Agrawal
0b5026fe95 openssh: 7.2p1 -> 7.2p2 for OSA x11fwd.adv
Fixes OpenSSH Security Advisory x11fwd.adv, which is available at
http://www.openssh.com/txt/x11fwd.adv.

(cherry picked from commit e5ca25eb7a)
2016-03-15 00:39:44 +01:00
Aneesh Agrawal
848855a5ab openssh: update GSSAPI patch to openssh 7.2
(cherry picked from commit ce74aac132)
2016-03-15 00:39:43 +01:00
Aneesh Agrawal
448f8b0451 openssh: decouple gssapi patch from kerberos
The GSSAPI patch is useful but maintained by Debian, not upstream, and
can be slow to update. To avoid breaking openssh_with_kerberos when
the openssh version is bumped but the GSSAPI patch has not been updated,
don't enable the GSSAPI patch implicitly but require it to be explicitly
enabled.

(cherry picked from commit 9e86984fe0)
2016-03-15 00:39:43 +01:00
Eelco Dolstra
4d9f98face openssh: Fix build
(cherry picked from commit cc71804ab0)
2016-03-14 23:22:07 +01:00
Aneesh Agrawal
2d683367ed openssh: 7.1p2 -> 7.2p1
(cherry picked from commit 7f8d50b443)
2016-03-14 23:22:07 +01:00
Graham Christensen
1f6b9b333d ilbc: extract-cfile.awk has fallen off the internet
Close #13923.

(cherry picked from commit 2aae2af845)
2016-03-14 21:42:53 +01:00
Graham Christensen
331e442eb7 graphite2: security update 1.2.4 -> 1.3.6 (close #13918)
CVE-2016-1977 CVE-2016-2790 CVE-2016-2791 CVE-2016-2792
CVE-2016-2793 CVE-2016-2794 CVE-2016-2795 CVE-2016-2796
CVE-2016-2797 CVE-2016-2798 CVE-2016-2799 CVE-2016-2800
CVE-2016-2801 CVE-2016-2802

vcunat fixed the tarball name and redirected to github.

(cherry picked from commit c310cb9e46)
2016-03-14 21:31:42 +01:00
Graham Christensen
7bcbf598ae eduke32: 20150420-5160 moved to the old releases directory
Close #13922.

(cherry picked from commit f165334492)
2016-03-14 20:48:41 +01:00
Graham Christensen
fd3f80cf37 d4x: Update download URL to fedora, d4k.krasu.ru no longer exists
(cherry picked from commit 2c8cb42c71)
2016-03-14 20:48:38 +01:00
Eelco Dolstra
04e07c8db2 pull request #13919 from grahamc/samba
samba: 4.3.1 -> 4.3.6 for CVE-2015-7560 CVE-2016-0771
(cherry picked from commit 06bebedf66)
2016-03-14 20:12:22 +01:00
Vladimír Čunát
d9b70033a6 tevent, ldb: security updates needed for samba
I managed to find no news or changelog on these,
so it's rather a black box to me, but it's clear that (some) bumps
were required for samba.

(cherry picked from commit 530214321d)
2016-03-14 20:12:21 +01:00
Philipp Volguine
4b0fceda0f Gitlab package version 8.5.1 -> 8.5.5
- had to bump the versions on a few gem dependencies

(cherry picked from commit a2424fffd3)
2016-03-14 16:41:02 +01:00
Philipp Volguine
e2c85dc185 gitlab service startup fix
- gitlab-sidekiq was being started with a misspelled argument name
 which caused the mailer queue to never run and never send mail

(cherry picked from commit 10198b586e)
2016-03-14 16:36:27 +01:00
Joachim Fasting
fc2a19eeb4 sway: fix build
Building the manual requires docbook_xsl; in sandboxed environments,
the build would fail due to being unable to download missing docbook
resources.

Also include some minor improvements to the build recipe:
- use fetchFromGitHub
- move build-time dependencies to nativeBuildInputs

xref: https://github.com/NixOS/nixpkgs/pull/13900
(cherry picked from commit 868082f616)
2016-03-14 15:56:56 +01:00
Joachim Fasting
a2273f6125 transmission: build transmission-cli
As of version 2.92, transmission-cli is no longer built by default (it
is deprecated).  This breaks the bittorrent vmtest.  For now, explicitly
enable the cli.

(cherry picked from commit 4393e6f619)
2016-03-14 11:27:18 +01:00
Joel Moberg
8065ea839d avahi: fix test
Reflects module changes made by cdd7310a50

(cherry picked from commit 75e96d2c30)
2016-03-14 11:26:57 +01:00
宋文武
fe5d858fe0 drop my maintainership (close #13881)
(cherry picked from commit 93feb5d115)
2016-03-13 18:39:32 +01:00
Vincent Laporte
f960683794 pixman: remove legacy patches
(cherry picked from commit b1801168e3)
https://github.com/NixOS/nixpkgs/pull/13579#issuecomment-195994082
2016-03-13 18:15:28 +01:00
Frederik Rietdijk
a7c64d5df6 Merge pull request #13888 from Beauhurst/r16.03_django_updates
django security updates (backport to release-16.03)
2016-03-13 10:54:03 -04:00
Robert Scott
b992c1b19b django: 1.9.3 -> 1.9.4, 1.8.10 -> 1.8.11 2016-03-13 14:48:52 +00:00
aszlig
8b39b045d8 chromium/update.sh: Allow to be called out-of-tree
Changing the working directory to
pkgs/applications/networking/browsers/chromium is a bit annoying, so
let's make sure the script can be called from anywhere.

Signed-off-by: aszlig <aszlig@redmoonstudios.org>
(cherry picked from commit a62f100ec3)
2016-03-13 12:33:09 +01:00
aszlig
84e8aa8105 chromium/common: Shut up about precompiling .pyc's
The errors are completely non-fatal and only cause a particular file to
be not precompiled. Unfortunately this can lead to confusion to whether
these errors are real errors or not, so let's shut it up completely
because they're *not* real errors.

Signed-off-by: aszlig <aszlig@redmoonstudios.org>
(cherry picked from commit f7e2171937)
2016-03-13 12:33:09 +01:00
Graham Christensen
7d734df56d chromium: 49.0.2626.75 -> 50.0.2661.26 for CVE-2016-1643 CVE-2016-1644 CVE-2016-1645
(cherry picked from commit e54434751a)
2016-03-13 12:33:08 +01:00
Joachim Fasting
42079b64d5 fstar: 2016-01-12 -> 0.9.2.0
FStar has been broken for a while, due to its regression test failing.
Bump to the latest release, which is newer than the previous rev.

(cherry picked from commit f82a46cf58)
2016-03-13 11:31:17 +01:00
Joachim Fasting
ae349e1d43 abook: fix build with gcc5
(cherry picked from commit d4ae8b68cb)
2016-03-13 11:30:47 +01:00
Sheena Artrip
8df9058d46 spotify: 1.0.23.93 -> 1.0.25.127
(cherry picked from commit ebc5b6e1ad)
2016-03-13 06:00:31 +01:00
Svein Ove Aas
fbf59749d3 simp_le: 2016-01-09 -> 2016-02-06
(cherry picked from commit 4b998c1f94)
Signed-off-by: Domen Kožar <domen@dev.si>
2016-03-12 20:43:28 +00:00
tg(x)
d54510bb57 pax-utils: 1.1.1 -> 1.1.6
(cherry picked from commit 184aca3907)
2016-03-12 17:49:54 +01:00
Domen Kožar
dbcb901036 fix installer tests #13559 2016-03-11 16:08:27 +00:00
Eelco Dolstra
5456a2d030 Move testBootstrapTools to make-bootstrap-tools.nix
(cherry picked from commit 639d7409f2)
2016-03-11 16:24:07 +01:00
zimbatm
bb4f9c93dc make-wrapper.sh: add an --unset argument
`--set FOO ""` is not strictly equivalent to `--unset FOO`. In the former case
the environment variable still exists with an empty string as a value.

(cherry picked from commit 5e5494a852)
2016-03-11 16:23:55 +01:00
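A quick shell demonstration of the distinction described above (not makeWrapper's implementation):

    export FOO=""
    [ -n "${FOO+set}" ] && echo "FOO is set, merely empty"
    unset FOO
    [ -n "${FOO+set}" ] || echo "FOO is truly unset"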
Vladimír Čunát
207882e82f make-bootstrap-tools: fix #13629: glibc problems
On x86_64-linux glibc started to use linker scripts more extensively.

(cherry picked from commit aa564c9ed0)
(cherry picked from commit 5702bc3b55)
2016-03-11 16:21:17 +01:00
Eelco Dolstra
ad6fee913d Don't apply patchelf to itself
Since patchelf 0.8 rewrites binaries in place, this causes a bus
error.

(cherry picked from commit a6d19c28f1)
2016-03-11 16:21:01 +01:00
Eelco Dolstra
11fc5010e4 go-1.4: Update binutils patch to handle i686-linux
http://hydra.nixos.org/build/32872391
(cherry picked from commit a11a281071)
2016-03-11 16:20:24 +01:00
Eelco Dolstra
f7eb5d7419 librdf_redland: Fix dependency on libraptor2
This broke soprano/nepomuk.

http://hydra.nixos.org/build/32556702
(cherry picked from commit e9f1fa8bb0)
2016-03-11 16:20:19 +01:00
Eelco Dolstra
c4fc753068 go-1.4: Fix build against binutils 2.26
The go linker barfed on the new relocation types emitted by binutils
2.26.

https://github.com/golang/go/issues/13114
http://hydra.nixos.org/build/32554876
(cherry picked from commit ff69fc6fb9)
2016-03-11 16:20:16 +01:00
Eelco Dolstra
a77785096c binutils: Apply upstream bug fix
This broke syslinux: http://hydra.nixos.org/build/32430411/nixlog/89/raw

(cherry picked from commit 89742e6b05)
2016-03-11 16:20:09 +01:00
Eelco Dolstra
07b375e361 glibc: Enable separate debug symbols
The importance of glibc makes it worthwhile to provide debug
symbols. However, this revealed an issue with separateDebugInfo: it
was indiscriminately adding --build-id to all ld invocations, while in
fact it should only do that for final links. Glibc also uses non-final
("relocatable") links, leading to subsequent failure to apply a build
ID ("Cannot create .note.gnu.build-id section, --build-id
ignored"). So now ld-wrapper.sh only passes --build-id for final
links.

(cherry picked from commit d5bb6a1f9c)
2016-03-11 16:19:00 +01:00
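A rough sketch of the idea only, not the actual ld-wrapper.sh: pass --build-id for final links but skip it when the link is relocatable.

    relocatable=
    for arg in "$@"; do
        case "$arg" in
            -r|-i|--relocatable) relocatable=1 ;;
        esac
    done
    # Only final links get a build ID note; relocatable output cannot carry one.
    if [ -z "$relocatable" ]; then
        extraFlags="--build-id"
    fi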
Eelco Dolstra
0c00ca14ed separateDebugInfo: Compress debug sections at compile/link time
(cherry picked from commit 69a337edae)
2016-03-11 16:18:56 +01:00
Eelco Dolstra
e9f847ea54 stdenv-linux: Ensure binutils comes before bootstrapTools in $PATH
Otherwise, when building glibc and other packages, the "strip" from
bootstrapTools is used, which doesn't recognise some tags produced by
the newer "ld" from binutils.

(cherry picked from commit 2040a9ac57)
2016-03-11 16:18:49 +01:00
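Illustrative only (the variable names are assumptions, not the stdenv bootstrap code): the first matching directory in $PATH wins, so putting the freshly built binutils ahead of the bootstrap tools makes its strip the one that gets used.

    export PATH="$binutils/bin:$bootstrapTools/bin:$PATH"
    command -v strip   # now resolves to $binutils/bin/strip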
Eelco Dolstra
9bafea49ed stdenv-linux: Avoid building m4/bison twice
(cherry picked from commit 559ecc9212)
2016-03-11 16:18:45 +01:00
Eelco Dolstra
a33faa6919 binutils: 2.23.1 -> 2.26
There has been an abortive attempt to upgrade binutils in the past
(see #909). Since we can't stay stuck at 2.23.1 forever, let's try
again.

(cherry picked from commit 817145ebbc)
2016-03-11 16:18:41 +01:00
Eelco Dolstra
a3750f7803 perlPackages: Fix some Perl 5.22 breakage
Most was caused by the removal of CGI and Module::Build from Perl.

(cherry picked from commit e9a81e41ed)
2016-03-11 16:18:04 +01:00
Eelco Dolstra
fda621baec perl: Make 5.22 the default
(cherry picked from commit a85ba820a4)
2016-03-11 16:17:59 +01:00
Eelco Dolstra
4807161a4d m2crypto: 0.21.1 -> 0.23.0
The previous version broke because it required SSLv2 support in OpenSSL:

ImportError: /nix/store/c0z7qlycaa2jhqjq0v9vy3j4nw4layw2-python2.7-m2crypto-0.21.1/lib/python2.7/site-packages/M2Crypto/__m2crypto.so: undefined symbol: SSLv2_method
(cherry picked from commit 49f23a6028)
2016-03-11 16:16:33 +01:00
Eelco Dolstra
3f65ad28be perl-packages.nix: Remove unnecessary variable quotations
(cherry picked from commit 4e906f9fb2)
2016-03-11 16:16:19 +01:00
Eelco Dolstra
856fa7f3f2 LWP::Protocol::https: Fix SSL cert handling
We lost this in 9f358f809d. Updated to
use /etc/ssl/certs/ca-certificates.crt if it exists and SSL_CERT_FILE
is not set.

(cherry picked from commit bd7f379a3f)
2016-03-11 16:16:07 +01:00
Eelco Dolstra
ac22e8344d Remove unmaintained gnupdate script
(cherry picked from commit 50e1e69c0a)
2016-03-11 16:16:00 +01:00
Eelco Dolstra
43504afcb7 cudatoolkit: Add version 7.5.18
(cherry picked from commit 6d97de951d)
2016-03-11 16:15:52 +01:00
Eelco Dolstra
d9dbe89eb5 cudatoolkit: Merge into one file and use callPackages
(cherry picked from commit 6c1e3a82de)
2016-03-11 16:15:47 +01:00
Eelco Dolstra
41dc4eca0c nix-generate-from-cpan: Skip "if" package since it's part of Perl now
(cherry picked from commit e2ad72342e)
2016-03-11 16:15:38 +01:00
Eelco Dolstra
9f185a9669 nix-generate-from-cpan: Don't quote names that don't need it
(cherry picked from commit 50b950fe8d)
2016-03-11 16:15:33 +01:00
Eelco Dolstra
a7ca3a4afb ifplugd: Remove
This package hasn't been updated in 11 years, and isn't really useful
anymore in a modern Linux system.

(cherry picked from commit 6bd0c3fe9d)
2016-03-11 16:15:21 +01:00
Eelco Dolstra
7ff8194989 keen4: Add license
(cherry picked from commit 449894ccb5)
2016-03-11 16:15:15 +01:00
Eelco Dolstra
147249a5e6 firefox: 44.0.2 -> 45.0
(cherry picked from commit 0d6d91739f)
2016-03-11 16:14:43 +01:00
Eelco Dolstra
e00cbc0130 thunderbird: Fix hash
Commit 4a54794d18 upgraded Thunderbird's
version to 38.6.0 (accidentally?), but didn't change the hash. This
wasn't caught due to tarballs.nixos.org being keyed on hash only.

(cherry picked from commit d25135ff6e)
2016-03-11 16:14:23 +01:00
Eelco Dolstra
e64ae06426 thunderbird: Fix build on gcc 5
It was barfing due to --enable-stdcxx-compat. Not clear if/why we
still need this, so let's disable it. If necessary a fix is available
at https://bugzilla.mozilla.org/show_bug.cgi?id=1153109.

(cherry picked from commit 4f5d48abf5)
2016-03-11 16:14:12 +01:00
Eelco Dolstra
751718af64 pykde4: Fix gcc 5 build failure
(cherry picked from commit bb43b542d6)
2016-03-11 16:14:08 +01:00
Eelco Dolstra
d5ca898035 mtdutils: 1.5.1 -> 1.5.2
Fixes gcc 5 build failure.

(cherry picked from commit 1146f460d3)
2016-03-11 16:14:03 +01:00
Eelco Dolstra
ffb23b77e0 rcs: Fix build on gcc 5
http://hydra.nixos.org/build/32624218
(cherry picked from commit 1c74a16e10)
2016-03-11 16:13:59 +01:00
Eelco Dolstra
e1922e3103 Mark some packages as broken due to GCC 5
(cherry picked from commit 47a04ac52c)
2016-03-11 16:13:56 +01:00
Eelco Dolstra
8668905d22 gcc: Remove 4.3 and 4.4
GCC 4.3 was used by only one package ("self"), which I've marked as
broken.

(cherry picked from commit 62c562a522)
2016-03-11 16:13:51 +01:00
Eelco Dolstra
80961187e0 gsl: Disable tests on i686-linux
http://hydra.nixos.org/build/32624041
(cherry picked from commit dd1f01ec11)
2016-03-11 16:13:46 +01:00
Eelco Dolstra
a98d111d81 telepathy-qt: Fix build on gcc 5
-Werror considered harmful.

(cherry picked from commit 4bb9117992)
2016-03-11 16:13:42 +01:00
Eelco Dolstra
6ef4803b6d libcli: Fix build on gcc 5
http://hydra.nixos.org/build/32606953
(cherry picked from commit 6bec7cb9fc)
2016-03-11 16:13:38 +01:00
Eelco Dolstra
5e936719a0 aterm: Mark as broken
It segfaults when built with GCC 5. I could try to fix it, but it's
not clear if anybody still cares about this package. Disabling it
until somebody complains.

http://hydra.nixos.org/build/32612811
(cherry picked from commit a5b501a36e)
2016-03-11 16:13:30 +01:00
Eelco Dolstra
a9b01de70d gcc: Use the pre-C++11 ABI by default
(cherry picked from commit 83011723af)
2016-03-11 16:13:01 +01:00
Eelco Dolstra
cb5f0fc64d Switch to GCC 5
(cherry picked from commit c388380bb4)
2016-03-11 16:12:57 +01:00
Eelco Dolstra
15db5f4353 openssl: Use 1.0.2 by default
Provided that not too much breaks, we should probably cherry-pick this
to 16.03, since the end of the 1.0.1 support window is a bit too close
to the expected lifetime of 16.03. @domenkozar

(cherry picked from commit e0d17fdf10)
2016-03-11 16:11:44 +01:00
Peter Simons
a56c4875dc perl-DateTimeX-Easy: disable failing test suite to fix Hydra build
(cherry picked from commit 83f2a6792c)
Signed-off-by: Domen Kožar <domen@dev.si>
2016-03-11 13:44:27 +00:00
Lluís Batlle i Rossell
c5b31675a8 Adding rowhammer test.
(cherry picked from commit e026b5c243)
2016-03-11 14:33:40 +01:00
Lluís Batlle i Rossell
3f7eee0c40 Update ghdl mcode to 0.33.
(cherry picked from commit e9d6aadc51ecdd274cd383a99ea840a94b58d954)
(cherry picked from commit 7f7c2171c0)
2016-03-11 14:33:39 +01:00
Domen Kožar
838702564f nss: 3.22 -> 3.22.2 (CVE-2016-1950)
(cherry picked from commit 603ea2652f)
Signed-off-by: Domen Kožar <domen@dev.si>
2016-03-11 12:18:51 +00:00
taku0
19c10d7347 flashplayer: 11.2.202.559 -> 11.2.202.577
(cherry picked from commit 218901bdb6)
2016-03-11 08:02:22 +01:00
Domen Kožar
8eba878ced speedtest-cli: 0.3.1 -> 0.3.4 (fix runtime)
(cherry picked from commit 83766949c1)
Signed-off-by: Domen Kožar <domen@dev.si>
2016-03-10 20:43:02 +00:00
Vladimír Čunát
2b240463bf antimony: fix build with glibc-2.23
And enableParallelBuilding = true;

(cherry picked from commit 7ccccec51b)
2016-03-10 19:03:20 +01:00
Joachim Fasting
d1df887b29 faust2: fix build
Fixes https://github.com/NixOS/nixpkgs/issues/12749

The build failure was caused by brittle detection of the
llvm version. See the code for (excessive) details. This fix
is a quick hack, a proper fix would be to parse the version
of the input llvm derivation and use that to derive a proper
value. Here we just pin the version.

Also move build-time deps to `nativeBuildInputs`.

(cherry picked from commit 2f73decba8)
Signed-off-by: Domen Kožar <domen@dev.si>
2016-03-10 16:51:45 +00:00
Domen Kožar
e1ce1fe14d Merge pull request #13598 from ttuegel/release-16.03-kde-5
Plasma 5.5.5
2016-03-10 16:48:53 +00:00
Vladimír Čunát
36bad18d57 nvidia-x11*: use mirror-agnostic URLs
(cherry picked from commit 17b83a88c3)
2016-03-10 15:57:35 +01:00
Graham Christensen
8c6cf0ca7c nvidia_x11_legacy304: 304.125 -> 304.131
Thanks to the great commit message in 6257425dab
(thank you edwtjo) I was able to go back and find that the patch which
was causing build failures is no longer necessary after upgrading
this legacy driver.

(cherry picked from commit fed36719f6)
Close #13799.

Tested-by: vcunat; it refuses to run with 4.4 kernel but it does with 4.1.
2016-03-10 15:51:59 +01:00
Al Zohali
b13a486f24 ChromiumOS kernel option fixup
(cherry picked from commit 9d03355bed)
Signed-off-by: Domen Kožar <domen@dev.si>
2016-03-10 12:53:08 +00:00
Christoph Hrdinka
9b544ab3cc transmission: 2.90 -> 2.92
(cherry picked from commit c5695bc8be)
Signed-off-by: Domen Kožar <domen@dev.si>
2016-03-10 12:51:58 +00:00
ashgillman
797b5a2f1b Use lcms2 for all pillow python versions
(cherry picked from commit 6ab7c50ea6)
Signed-off-by: Domen Kožar <domen@dev.si>
2016-03-10 12:50:27 +00:00
Thomas Strobel
c74e3fd3ae xpra: 0.14.19/0.15.3 -> 0.16.2
(cherry picked from commit 83b15e2fc4)
Signed-off-by: Domen Kožar <domen@dev.si>
2016-03-10 12:36:25 +00:00
Tim Steinbach
d41ac378b6 kernel: 4.4.4 -> 4.4.5
(cherry picked from commit 7c90420119)
Signed-off-by: Domen Kožar <domen@dev.si>
2016-03-10 12:36:15 +00:00
Domen Kožar
e8e8164f34 Remove which -> type -P alias.
Aliases are not the same as programs. They won't work in subshells.
It's better to just use which as it's only 88K.

(cherry picked from commit 73ba0ae2de)
Signed-off-by: Domen Kožar <domen@dev.si>
2016-03-10 10:40:29 +00:00
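A short demonstration of why the alias is not a drop-in replacement (assuming no real which binary is on $PATH):

    alias which='type -P'
    which ls            # works in this interactive shell
    bash -c 'which ls'  # fails: the child shell never sees the alias
    # Installing the actual which program makes both invocations work.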
Domen Kožar
fb57ac55ff bareos: add rocksdb dependency
(cherry picked from commit 76f8ee2418)
Signed-off-by: Domen Kožar <domen@dev.si>
2016-03-10 09:44:47 +00:00
Hoang Xuan Phu
6fa68be9c2 archiveopteryx: override specific build settings instead of PREFIX
Closes #13708 and fixes #13707.

(cherry picked from commit 5ac1de516e)
Closes #13805.
2016-03-10 09:35:16 +01:00
Franz Pletz
6377a295a3 Merge pull request #13796 from grahamc/libotr-16.03
libotr: upgrade v4, remove v3, and pidgin-otr for CVE-2016-2851 (16.03)
2016-03-10 00:24:01 +01:00
Graham Christensen
a039af10eb pidgin-otr: 4.0.1 -> 4.0.2 for CVE-2016-2851
(cherry picked from commit 6f8a914d57b5696e20c961659649aee286c4c7e6)
2016-03-09 17:23:42 -06:00
Graham Christensen
26833c5ecd libotr_3_2: remove, not updated since 2012, and unused.
(cherry picked from commit 6f574732a43ac24832ac92df99e8c826b301a4eb)
2016-03-09 17:23:42 -06:00
Franz Pletz
33b0851d88 libotr: 4.1.0 -> 4.1.1 (CVE-2016-2851)
https://www.x41-dsec.de/lab/advisories/x41-2016-001-libotr/
(cherry picked from commit 728cf25e16)
2016-03-10 00:14:40 +01:00
Vladimír Čunát
b28e618fb0 gpgstats: fix build on 32-bit; LFS problems
(cherry picked from commit 5782b5d3e8)
/cc #13559.
2016-03-09 23:16:45 +01:00
Nikolay Amiantov
c82d282f06 stepmania: fix on i686
(cherry picked from commit a75eb513c6)
2016-03-09 22:17:33 +03:00
Domen Kožar
34b06b4ba1 xen: remove unneeded deps now that stubdom is disabled
(cherry picked from commit 9ad60eae48)
Signed-off-by: Domen Kožar <domen@dev.si>
2016-03-09 18:56:41 +00:00
kklas
d005b64940 sw-raid: make mdmon start from initrd
Also add required systemd services for starting/stopping mdmon.

(cherry picked from commit aac666e302)

See https://github.com/NixOS/nixpkgs/pull/13447#issuecomment-189963243 for cherry-pick discussion.
2016-03-09 21:20:36 +03:00
Domen Kožar
9c36de8cb2 xen: disable stubdom due to #13590
(cherry picked from commit 086a7d138d)
Signed-off-by: Domen Kožar <domen@dev.si>
2016-03-09 13:52:01 +00:00
Lluís Batlle i Rossell
2e324f2144 Fixing my-env to get NIX_CFLAGS_COMPILE and so on
Yes, I still use my-env. I tested that it works on 16.03.

(cherry picked from commit 63ffd27a6b)
2016-03-09 12:45:45 +01:00
Pascal Wittmann
a0531e0394 libclc: 2015-03-27 -> 0.2.0
Fix build with newer LLVM version

(cherry picked from commit b1dd00f616)
Signed-off-by: Domen Kožar <domen@dev.si>
2016-03-09 11:27:09 +00:00
Domen Kožar
5978790955 boringssl: 2014-08-20 -> 2016-03-08
(cherry picked from commit 64d5af4663)
Signed-off-by: Domen Kožar <domen@dev.si>
2016-03-09 11:27:09 +00:00
Rok Garbas
e696b60f38 nixos/doc: adding line about the change in service.syncthing 2016-03-09 12:25:58 +01:00
Rob Vermaas
aecf27fe92 Remove kill -9 -1 from initrd of amazon-image.nix. This causes a kernel panic.
(cherry picked from commit ed5920ec65)
2016-03-09 09:56:18 +00:00
John Chee
4baa4995f3 lastpass-cli: 0.7.0 -> 0.9.0
(cherry picked from commit e0b541acc7)
2016-03-09 07:22:11 +01:00
Joachim Fasting
e3613ab3ee dnscrypt-proxy service: fix apparmor profile
The daemon additionally requires libcap, liblz4, and libattr.

(cherry picked from commit e7cfccbcc2)
2016-03-09 05:17:32 +01:00
Joachim Fasting
55588d7acd dnscrypt-proxy service: fix default resolver name
The "opendns" resolver has changed name to "cisco", causing the default
dnscrypt-proxy configuration to fail.

(cherry picked from commit e3ae435aad)
2016-03-09 03:01:17 +01:00
Mathieu Boespflug
440e2a757a spark: Fix tarball hash.
Maybe tarball changed upstream. Who knows.

Fixes #13703

(cherry picked from commit 6cf1853f29)

@joachifm: the original used the archive checksum, whereas `fetchzip` uses the
checksum of the archive contents.
2016-03-08 16:19:32 +01:00
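For context, the two hashes can be obtained like this (the URL is a placeholder, not the actual spark tarball location); fetchurl expects the first, fetchzip the second:

    nix-prefetch-url          https://example.org/spark.tgz   # hash of the archive itself
    nix-prefetch-url --unpack https://example.org/spark.tgz   # hash of the unpacked contents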
Nathan Zadoks
2a36173043 bird module: run as user/group bird, not ircd
(cherry picked from commit 0360e410b7)
2016-03-08 11:58:34 +01:00
Sander van der Burg
db10d1bd21 runLatex: always include basic texlive stuff (thanks to vcunat)
(cherry picked from commit e91b9bede0)
2016-03-08 10:50:07 +00:00
Franz Pletz
69dbce32ce grsecurity: 4.4.2 -> 4.4.4
See #13505.

(cherry picked from commit 255d710757)
2016-03-08 01:07:55 +01:00
Franz Pletz
613dfd513c Merge remote-tracking branch 'origin/pr/13505'
Fixes #13505.

(cherry picked from commit eb5a897161)
2016-03-08 01:07:40 +01:00
Franz Pletz
3a5a26e2ed linux_4_4: 4.4.3 -> 4.4.4
(cherry picked from commit 3b1f2e070b)
2016-03-07 23:35:07 +01:00
Franz Pletz
89774f5b38 linux_3_14: 3.14.61 -> 3.14.63
(cherry picked from commit af40e356fe)
2016-03-07 23:35:05 +01:00
Franz Pletz
3356e5c033 linux_3_12: 3.12.54 -> 3.12.55
(cherry picked from commit 354a1935d3)
2016-03-07 23:35:04 +01:00
Franz Pletz
bc6547a13f linux_3_10: 3.10.97 -> 3.10.99
(cherry picked from commit 5b8361c118)
2016-03-07 23:34:59 +01:00
Svein Ove Aas
5b3c61ea5f nvidia: major update 358.16 -> 361.28
Beta also gets updated, but even upstream it's older than stable.
vcunat generalized parallel make. Close #12719.

(cherry picked from commit e0fe8cf204)
This is a new long-lived branch, so the update seems suitable for 16.03.
2016-03-07 23:20:15 +01:00
Henry Till
927ce5ca71 racket: fix build error
https://github.com/racket/racket/issues/1222

closes #13733

(cherry picked from commit cf71bc9255)
2016-03-07 21:28:54 +01:00
Robert Helgesson
50131497b7 eclipse-plugin-checkstyle: 6.14.0 -> 6.16.0
Download URL for 6.14.0 is broken, fixes #13746.

(cherry picked from commit 54c7ef17a9)
2016-03-07 21:11:33 +01:00
Christoph Hrdinka
b5a0c16f8f pidgin: fix gstreamer plugin path
Closes #13722, fixes #13719 and maybe #10556.

(cherry picked from commit d3e3b135ea)
2016-03-07 07:12:25 +01:00
Luca Bruno
3b5ae362d9 chromium: add StartupWMClass to desktop file. Fixes #12433
(cherry picked from commit 5f8311775c)
2016-03-06 21:52:47 +01:00
Bjørn Forsman
1db66c9f47 grafana service: unbreak
Accidentally broken by 4fede53c09
("nixos manuals: bring back package references").

Without this fix, grafana won't start:

$ systemctl status grafana
...
systemd[1]: Starting Grafana Service Daemon...
systemd[1]: Started Grafana Service Daemon.
grafana[666]: 2016/03/06 19:57:32 [log.go:75 Fatal()] [E] Failed to detect generated css or javascript files in static root (%!s(MISSING)), have you executed default grunt task?
systemd[1]: grafana.service: Main process exited, code=exited, status=1/FAILURE
systemd[1]: grafana.service: Unit entered failed state.
systemd[1]: grafana.service: Failed with result 'exit-code'.

(cherry picked from commit d99033beb9)
2016-03-06 21:44:53 +01:00
Vladimír Čunát
1e634a6fcc curl: use an official download link
It works now that we have e6f61b4cf3.

(cherry picked from commit a458a9f78f)
2016-03-06 11:13:49 +01:00
Profpatsch
0db38615dc manual/installing: add loadkeys hint
Closes #13702 and fixes #3132.
(Tiny changes by vcunat.)

(cherry picked from commit af4e8a4d3b)
2016-03-06 11:06:25 +01:00
aszlig
58d901d16f chromium: Update all channels to latest versions
Overview of the updated versions:

stable: 48.0.2564.116 -> 49.0.2623.75
beta:   49.0.2623.63  -> 49.0.2623.75
dev:    50.0.2657.0   -> 50.0.2661.11

Stable and beta are now on par because of the release of a major stable
update.

The release addresses 26 security vulnerabilities, the following with an
assigned CVE:

 * CVE-2016-1630: Same-origin bypass in Blink. Credit to Mariusz
                  Mlynski.
 * CVE-2016-1631: Same-origin bypass in Pepper Plugin. Credit to Mariusz
                  Mlynski.
 * CVE-2016-1632: Bad cast in Extensions. Credit to anonymous.
 * CVE-2016-1633: Use-after-free in Blink. Credit to cloudfuzzer.
 * CVE-2016-1634: Use-after-free in Blink. Credit to cloudfuzzer.
 * CVE-2016-1635: Use-after-free in Blink. Credit to Rob Wu.
 * CVE-2016-1636: SRI Validation Bypass. Credit to Ryan Lester and
                  Bryant Zadegan.
 * CVE-2015-8126: Out-of-bounds access in libpng. Credit to
                  joerg.bornemann.
 * CVE-2016-1637: Information Leak in Skia. Credit to Keve Nagy.
 * CVE-2016-1638: WebAPI Bypass. Credit to Rob Wu.
 * CVE-2016-1639: Use-after-free in WebRTC. Credit to Khalil Zhani.
 * CVE-2016-1640: Origin confusion in Extensions UI. Credit to Luan
                  Herrera.
 * CVE-2016-1641: Use-after-free in Favicon. Credit to Atte Kettunen of
                  OUSPG.

The full announcement which also includes the link to the bug tracker
can be found here:

http://googlechromereleases.blogspot.de/2016/03/stable-channel-update.html

Also, the 32bit Chrome package needed for the Flash and Widevine plugins
doesn't exist anymore, because Google has dropped support for 32bit
distros, see here for the announcement:

https://groups.google.com/a/chromium.org/forum/#!topic/chromium-dev/FoE6sL-p6oU

On our end, we need to fix the patch for the plugin paths to work for
the latest dev channel. The change is very minor: the only reason
nix_plugin_paths_46.patch fails to apply is an iOS-related ifdef.

Built and tested on my Hydra at:

https://headcounter.org/hydra/eval/311511

Signed-off-by: aszlig <aszlig@redmoonstudios.org>
Fixes: #13665
(cherry picked from commit 8b97ca270e)
2016-03-05 22:54:06 +01:00
Vladimír Čunát
3a3f336148 nvidia_x11_legacy340: update 340.76 -> 340.94
Fixes #13658.

(cherry picked from commit 54d342add8)
2016-03-05 22:20:18 +01:00
Domen Kožar
780dc0ad29 Merge pull request #13689 from 4z3/release-16.03
exim: 4.86 -> 4.86.2
2016-03-05 19:24:07 +00:00
tv
62c29a96be exim: 4.86 -> 4.86.2 2016-03-05 14:45:10 +01:00
aszlig
0cb416f51d chromium/updater: Fix eval error on stdenv.is32bit
There is no stdenv.is32bit, so let's just use !stdenv.is64bit.

Signed-off-by: aszlig <aszlig@redmoonstudios.org>
(cherry picked from commit c3d82f0fbf)
2016-03-05 03:16:52 +01:00
aszlig
181986627e chromium/updater: Fix getting latest versions
Comparing the current version with the version in sources list and
accidentally swapping the version arguments isn't going to get very far
because every new version that will come up will then be treated as "we
already have that version".

So we're now using versionOlder and also a check whether the version is
the *same* as the one in sources.nix.

Signed-off-by: aszlig <aszlig@redmoonstudios.org>
(cherry picked from commit 8d5accb691)
2016-03-05 03:03:39 +01:00
Kevin Marsh
f3ab45e5dc django: 1.8.9 -> 1.8.10 2016-03-04 16:11:14 -05:00
Kevin Marsh
1e2ec46525 django: 1.9.2 -> 1.9.3 2016-03-04 16:11:07 -05:00
Robin Gloster
3d10bc8804 ceph: fix for zip timestamps
(cherry picked from commit e2372a4183)
Signed-off-by: Robin Gloster <mail@glob.in>
2016-03-04 14:17:26 +00:00
zimbatm
f7ac2e1b2e bats: fixes installation
The build was failing with:

  /...-stdenv/setup: ./install.sh: /usr/bin/env: bad interpreter: No such file
  or directory

See https://hydra.nixos.org/build/32353411/log

(cherry picked from commit a3119bd35d)
Signed-off-by: Domen Kožar <domen@dev.si>
2016-03-03 15:19:16 +00:00
Domen Kožar
c9f73f8860 Merge pull request #13630 from mbakke/dnscrypt-stable
dnscrypt-proxy: 1.6.0 -> 1.6.1
2016-03-03 14:45:18 +00:00
Thomas Tuegel
73435915ba kde5.plasma.plasma-workspace: 5.5.5.1 -> 5.5.5.2
(cherry picked from commit 6af59c9d06)
2016-03-03 05:57:34 -06:00
Marius Bakke
2d6d111a3e dnscrypt-proxy: 1.6.0 -> 1.6.1 2016-03-02 23:49:44 +00:00
Domen Kožar
89d9159353 Merge pull request #13618 from cleverca22/multimc-release
multimc: fix building under chroot
2016-03-02 17:52:53 +00:00
michael bishop
ff28655321 multimc: fix building under chroot 2016-03-02 13:51:15 -04:00
Domen Kožar
31b5d57123 Attempt to fix transient grub1 test kernel panics
Example: http://hydra.nixos.org/build/32469819/nixlog/26/raw
(cherry picked from commit ba05826707)
Signed-off-by: Domen Kožar <domen@dev.si>
2016-03-02 17:32:30 +00:00
Domen Kožar
12d4f7abfb remove lvm_33, fixes #12310
(cherry picked from commit d72e93f59d)
Signed-off-by: Domen Kožar <domen@dev.si>
2016-03-02 17:01:15 +00:00
Eelco Dolstra
2068621b45 openssl: 1.0.1r -> 1.0.1s, 1.0.2f -> 1.0.2g
CVE-2016-0800. Fixes #13506.

(cherry picked from commit cdbd14a1a8)
2016-03-02 10:29:59 +01:00
aszlig
468a40bd89 nixos/release: Replace a: b: a // b by mergeAttrs
No change in functionality, it just looks nicer that way.

Signed-off-by: aszlig <aszlig@redmoonstudios.org>
(cherry picked from commit a429444a75)
2016-03-02 02:22:03 +01:00
Robin Gloster
a56d33a016 qt_gstreamer1: add upstream patch to build with current gstreamer
(cherry picked from commit 995475944f)
Signed-off-by: Domen Kožar <domen@dev.si>
2016-03-01 23:45:32 +00:00
aszlig
a49ba9c6fe Merge pull request #13585 (nixos-tests-splitup)
This splits a few NixOS tests (namely Chromium, VirtualBox and the
networking tests) into several subtests that are exposed via attributes.

The networking tests were already split up but they didn't expose an
attribute set of available tests but used a function attribute to
specify the resulting test instead.

A new function callSubTests in nixos/release.nix is now responsible for
gathering subtests, which is also used for the installer and boot tests.
The latter is now placed in a tests.boot.* namespace rather than
"polluting" the tests attribute set with its subtest.
2016-03-01 23:19:00 +01:00
Thomas Tuegel
b96e0c2c8c kde5.plasma: 5.5.4 -> 5.5.5
This minor update includes bug and security fixes, so it should be
backported to the release branch.

(cherry picked from commit 78602b6806)
2016-03-01 13:45:25 -06:00
Thomas Tuegel
adb81add90 kde5: consolidate packages into desktops/kde-5
This is an organizational change from master. It is not strictly
necessary, but backporting it to the release branch will make it
significantly easier to backport patches or updates in the future.

(cherry picked from commit 98d8e1a160)
2016-03-01 13:44:33 -06:00
Luca Bruno
5440568fea devhelp: fix build with new webkitgtk
(cherry picked from commit b2889efff2)
2016-03-01 18:58:44 +01:00
Domen Kožar
a202c8027e openspades: fix build 2016-03-01 14:32:06 +01:00
Domen Kožar
746912a9ca rethinkdb: patch for glibc 2.23 2016-03-01 14:13:53 +01:00
Carles Pagès
503bb92245 kodiPlugins.pvr-hts: 2.1.18 -> 2.2.13
(cherry picked from commit 7eb15265d3)
2016-03-01 14:03:02 +01:00
Sander van der Burg
6adae8e146 nodejs-4_x: bump to version 4.3.1
(cherry picked from commit c8e55671cd)
2016-03-01 12:31:19 +00:00
zimbatm
461ed7cc55 redmine: fix compilation with ruby 2.3
Just bumped the JSON dependency manually to the one bundled with ruby 2.3

(cherry picked from commit de8c119a4b)
Signed-off-by: Domen Kožar <domen@dev.si>
2016-02-29 13:49:37 +00:00
Domen Kožar
d07f940046 transmission: 2.84 -> 2.90
(cherry picked from commit 756604cc08)
Signed-off-by: Domen Kožar <domen@dev.si>
2016-02-29 13:10:42 +00:00
Eelco Dolstra
eef44f2495 patchelf: Use 0.9 release tarball
(cherry picked from commit d255a8980a)
2016-02-29 11:42:20 +01:00
Eelco Dolstra
f5351fec36 patchelf: 0.8 -> 0.9
(cherry picked from commit 424af2cd52)
2016-02-29 11:42:16 +01:00
Eelco Dolstra
4025cb9e0c netpbm: Disable parallel building
Looks like the "partlist" file is constructed in an unsound way.

http://hydra.nixos.org/build/32430147
(cherry picked from commit abd3c246a4)
2016-02-29 11:42:04 +01:00
Eelco Dolstra
dc99ce8565 Manual: rl-unstable.xml -> rl-1603.xml
(cherry picked from commit 56e68d4d5f)
2016-02-29 11:41:23 +01:00
Vladimír Čunát
6ee0b0e335 haskellPackages: fix evaluation
It seems `self` was wanted instead of `pkgs` by the author of 3844206.

(cherry picked from commit 0294fc5bbd)
Signed-off-by: Domen Kožar <domen@dev.si>
2016-02-29 10:26:05 +00:00
Domen Kožar
74f22ff827 prepare for 16.03 2016-02-28 22:30:51 +00:00
17342 changed files with 1229173 additions and 1025346 deletions


@@ -1,8 +0,0 @@
;;; Directory Local Variables
;;; For more information see (info "(emacs) Directory Variables")
((nil
(bug-reference-bug-regexp . "\\(\\(?:[Ii]ssue \\|[Ff]ixe[ds] \\|[Rr]esolve[ds]? \\|[Cc]lose[ds]? \\|[Pp]\\(?:ull [Rr]equest\\|[Rr]\\) \\|(\\)#\\([0-9]+\\))?\\)")
(bug-reference-url-format . "https://github.com/NixOS/nixpkgs/issues/%s"))
(nix-mode
(tab-width . 2)))


@@ -1,28 +0,0 @@
# EditorConfig configuration for nixpkgs
# http://EditorConfig.org
# Top-most EditorConfig file
root = true
# Unix-style newlines with a newline ending every file, utf-8 charset
[*]
end_of_line = lf
insert_final_newline = true
trim_trailing_whitespace = true
charset = utf-8
# see https://nixos.org/nixpkgs/manual/#chap-conventions
# Match nix/ruby/docbook files, set indent to spaces with width of two
[*.{nix,rb,xml}]
indent_style = space
indent_size = 2
# Match shell/python/perl scripts, set indent to spaces with width of four
[*.{sh,py,pl}]
indent_style = space
indent_size = 4
# Match diffs, avoid to trim trailing whitespace
[*.{diff,patch}]
trim_trailing_whitespace = false

16  .gitattributes (vendored)

@@ -1,16 +0,0 @@
**/deps.nix linguist-generated
**/node-packages.nix linguist-generated
pkgs/applications/editors/emacs-modes/*-generated.nix linguist-generated
pkgs/development/r-modules/*-packages.nix linguist-generated
pkgs/development/haskell-modules/hackage-packages.nix linguist-generated
pkgs/development/beam-modules/hex-packages.nix linguist-generated
doc/** linguist-documentation
doc/default.nix linguist-documentation=false
nixos/doc/** linguist-documentation
nixos/doc/default.nix linguist-documentation=false
nixos/modules/module-list.nix merge=union
# pkgs/top-level/all-packages.nix merge=union

99  .github/CODEOWNERS (vendored)

@@ -1,99 +0,0 @@
# CODEOWNERS file
#
# This file is used to describe who owns what in this repository. This file does not
# replace `meta.maintainers` but is instead used for other things than derivations
# and modules, like documentation, package sets, and other assets.
#
# For documentation on this file, see https://help.github.com/articles/about-codeowners/
# Mentioned users will get code review requests.
# This file
/.github/CODEOWNERS @edolstra
# Libraries
/lib @edolstra @nbp
/lib/systems @nbp @ericson2314
/lib/generators.nix @edolstra @nbp @Profpatsch
/lib/debug.nix @edolstra @nbp @Profpatsch
# Nixpkgs Internals
/default.nix @nbp
/pkgs/top-level/default.nix @nbp @Ericson2314
/pkgs/top-level/impure.nix @nbp @Ericson2314
/pkgs/top-level/stage.nix @nbp @Ericson2314
/pkgs/stdenv/generic @Ericson2314
/pkgs/stdenv/cross @Ericson2314
/pkgs/build-support/cc-wrapper @Ericson2314 @orivej
/pkgs/build-support/bintools-wrapper @Ericson2314 @orivej
/pkgs/build-support/setup-hooks @Ericson2314
# NixOS Internals
/nixos/default.nix @nbp
/nixos/lib/from-env.nix @nbp
/nixos/lib/eval-config.nix @nbp
/nixos/doc/manual/configuration/abstractions.xml @nbp
/nixos/doc/manual/configuration/config-file.xml @nbp
/nixos/doc/manual/configuration/config-syntax.xml @nbp
/nixos/doc/manual/configuration/modularity.xml @nbp
/nixos/doc/manual/development/assertions.xml @nbp
/nixos/doc/manual/development/meta-attributes.xml @nbp
/nixos/doc/manual/development/option-declarations.xml @nbp
/nixos/doc/manual/development/option-def.xml @nbp
/nixos/doc/manual/development/option-types.xml @nbp
/nixos/doc/manual/development/replace-modules.xml @nbp
/nixos/doc/manual/development/writing-modules.xml @nbp
/nixos/doc/manual/man-nixos-option.xml @nbp
/nixos/modules/installer/tools/nixos-option.sh @nbp
# Python-related code and docs
/maintainers/scripts/update-python-libraries @FRidh
/pkgs/top-level/python-packages.nix @FRidh
/pkgs/development/interpreters/python @FRidh
/pkgs/development/python-modules @FRidh
/doc/languages-frameworks/python.md @FRidh
# Haskell
/pkgs/development/compilers/ghc @peti @ryantm @basvandijk
/pkgs/development/haskell-modules @peti @ryantm @basvandijk
/pkgs/development/haskell-modules/default.nix @peti @ryantm @basvandijk
/pkgs/development/haskell-modules/generic-builder.nix @peti @ryantm @basvandijk
/pkgs/development/haskell-modules/hoogle.nix @peti @ryantm @basvandijk
# R
/pkgs/applications/science/math/R @peti
/pkgs/development/r-modules @peti
# Ruby
/pkgs/development/interpreters/ruby @zimbatm
/pkgs/development/ruby-modules @zimbatm
# Rust
/pkgs/development/compilers/rust @Mic92 @LnL7
# Darwin-related
/pkgs/stdenv/darwin @NixOS/darwin-maintainers
/pkgs/os-specific/darwin @NixOS/darwin-maintainers
# Beam-related (Erlang, Elixir, LFE, etc)
/pkgs/development/beam-modules @gleber
/pkgs/development/interpreters/erlang @gleber
/pkgs/development/interpreters/lfe @gleber
/pkgs/development/interpreters/elixir @gleber
/pkgs/development/tools/build-managers/rebar @gleber
/pkgs/development/tools/build-managers/rebar3 @gleber
/pkgs/development/tools/erlang @gleber
# Jetbrains
/pkgs/applications/editors/jetbrains @edwtjo
# Eclipse
/pkgs/applications/editors/eclipse @rycee
# https://github.com/NixOS/nixpkgs/issues/31401
/lib/licenses.nix @ghost
# Qt / KDE
/pkgs/applications/kde @ttuegel
/pkgs/desktops/plasma-5 @ttuegel
/pkgs/development/libraries/kde-frameworks @ttuegel
/pkgs/development/libraries/qt-5 @ttuegel


@@ -12,41 +12,4 @@ under the terms of [COPYING](../COPYING), which is an MIT-like license.
## Submitting changes
* Format the commit messages in the following way:
```
(pkg-name | nixos/<module>): (from -> to | init at version | refactor | etc)
(Motivation for change. Additional information.)
```
Examples:
* nginx: init at 2.0.1
* firefox: 54.0.1 -> 55.0
* nixos/hydra: add bazBaz option
Dual baz behavior is needed to do foo.
* nixos/nginx: refactor config generation
The old config generation system used impure shell scripts and could break in specific circumstances (see #1234).
* `meta.description` should:
* Be capitalized.
* Not start with the package name.
* Not have a period at the end.
* `meta.license` must be set and fit the upstream license.
* If there is no upstream license, `meta.license` should default to `stdenv.lib.licenses.unfree`.
* `meta.maintainers` must be set.
See the nixpkgs manual for more details on [standard meta-attributes](https://nixos.org/nixpkgs/manual/#sec-standard-meta-attributes) and on how to [submit changes to nixpkgs](https://nixos.org/nixpkgs/manual/#chap-submitting-changes).
## Writing good commit messages
In addition to writing properly formatted commit messages, it's important to include relevant information so other developers can later understand *why* a change was made. While this information usually can be found by digging code, mailing list/Discourse archives, pull request discussions or upstream changes, it may require a lot of work.
For package version upgrades and such a one-line commit message is usually sufficient.
## Reviewing contributions
See the nixpkgs manual for more details on how to [Review contributions](https://nixos.org/nixpkgs/manual/#sec-reviewing-contributions).
See the nixpkgs manual for details on how to [Submit changes to nixpkgs](http://hydra.nixos.org/job/nixpkgs/trunk/manual/latest/download-by-type/doc/manual#chap-submitting-changes).


@@ -1,12 +1,17 @@
## Issue description
## Basic info
To make sure that we are on the same page:
* Kernel: (run `uname -a`)
* System: (NixOS: `nixos-version`, Ubuntu/Fedora: `lsb_release -a`, ...)
* Nix version: (run `nix-env --version`)
* Nixpkgs version: (run `nix-instantiate --eval '<nixpkgs>' -A lib.nixpkgsVersion`)
## Describe your issue here
### Expected result
### Actual result
### Steps to reproduce
## Technical details
Please run `nix-shell -p nix-info --run "nix-info -m"` and paste the
results.


@@ -1,20 +1,18 @@
###### Motivation for this change
###### Things done:
###### Things done
<!-- Please check what applies. Note that these are not hard requirements but merely serve as information for reviewers. -->
- [ ] Tested using sandboxing ([nix.useSandbox](http://nixos.org/nixos/manual/options.html#opt-nix.useSandbox) on NixOS, or option `sandbox` in [`nix.conf`](http://nixos.org/nix/manual/#sec-conf-file) on non-NixOS)
- Built on platform(s)
- [ ] NixOS
- [ ] macOS
- [ ] other Linux distributions
- [ ] Tested via one or more NixOS test(s) if existing and applicable for the change (look inside [nixos/tests](https://github.com/NixOS/nixpkgs/blob/master/nixos/tests))
- [ ] Tested using sandboxing (`nix-build --option build-use-chroot true` or [nix.useChroot](http://nixos.org/nixos/manual/options.html#opt-nix.useChroot) on NixOS)
- [ ] Built on platform(s): NixOS / OSX / Linux
- [ ] Tested compilation of all pkgs that depend on this change using `nix-shell -p nox --run "nox-review wip"`
- [ ] Tested execution of all binary files (usually in `./result/bin/`)
- [ ] Determined the impact on package closure size (by running `nix path-info -S` before and after)
- [ ] Fits [CONTRIBUTING.md](https://github.com/NixOS/nixpkgs/blob/master/.github/CONTRIBUTING.md).
###### More
Fixes issue #<insert id>
cc @<maintainer>
---
_Please note, that points are not mandatory, but rather desired._

5  .mention-bot (new file)

@@ -0,0 +1,5 @@
{
"userBlacklist": [
"civodul"
]
}

7  .travis.yml (new file)

@@ -0,0 +1,7 @@
language: python
python: "3.4"
sudo: required
dist: trusty
before_install: ./maintainers/scripts/travis-nox-review-pr.sh nix
install: ./maintainers/scripts/travis-nox-review-pr.sh nox
script: ./maintainers/scripts/travis-nox-review-pr.sh build


@@ -1 +1 @@
18.09
16.03


@@ -1,4 +1,4 @@
Copyright (c) 2003-2018 Eelco Dolstra and the Nixpkgs/NixOS contributors
Copyright (c) 2003-2016 Eelco Dolstra and the Nixpkgs/NixOS contributors
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
@@ -23,7 +23,9 @@ WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
Note: the license above does not apply to the packages built by the
Nix Packages collection, merely to the package descriptions (i.e., Nix
expressions, build scripts, etc.). It also might not apply to patches
expressions, build scripts, etc.). Also, the license does not apply
to some of the binaries used for bootstrapping Nixpkgs (e.g.,
pkgs/stdenv/linux/tools/bash). It also might not apply to patches
included in Nixpkgs, which may be derivative works of the packages to
which they apply. The aforementioned artifacts are all covered by the
licenses of the respective packages.


@@ -1,41 +1,43 @@
[<img src="https://nixos.org/logo/nixos-hires.png" width="500px" alt="logo" />](https://nixos.org/nixos)
[<img src="http://nixos.org/logo/nixos-hires.png" width="500px" alt="logo" />](https://nixos.org/nixos)
[![Code Triagers Badge](https://www.codetriage.com/nixos/nixpkgs/badges/users.svg)](https://www.codetriage.com/nixos/nixpkgs)
[![Build Status](https://travis-ci.org/NixOS/nixpkgs.svg?branch=master)](https://travis-ci.org/NixOS/nixpkgs)
[![Issue Stats](http://www.issuestats.com/github/nixos/nixpkgs/badge/pr?style=flat)](http://www.issuestats.com/github/nixos/nixpkgs)
[![Issue Stats](http://www.issuestats.com/github/nixos/nixpkgs/badge/issue?style=flat)](http://www.issuestats.com/github/nixos/nixpkgs)
Nixpkgs is a collection of packages for the [Nix](https://nixos.org/nix/) package
manager. It is periodically built and tested by the [Hydra](https://hydra.nixos.org/)
manager. It is periodically built and tested by the [hydra](http://hydra.nixos.org/)
build daemon as so-called channels. To get channel information via git, add
[nixpkgs-channels](https://github.com/NixOS/nixpkgs-channels.git) as a remote:
```
% git remote add channels https://github.com/NixOS/nixpkgs-channels.git
% git remote add channels git://github.com/NixOS/nixpkgs-channels.git
```
For stability and maximum binary package support, it is recommended to maintain
custom changes on top of one of the channels, e.g. `nixos-18.09` for the latest
custom changes on top of one of the channels, e.g. `nixos-15.09` for the latest
release and `nixos-unstable` for the latest successful build of master:
```
% git remote update channels
% git rebase channels/nixos-18.09
% git rebase channels/nixos-15.09
```
For pull-requests, please rebase onto nixpkgs `master`.
[NixOS](https://nixos.org/nixos/) Linux distribution source code is located inside
[NixOS](https://nixos.org/nixos/) linux distribution source code is located inside
`nixos/` folder.
* [NixOS installation instructions](https://nixos.org/nixos/manual/#ch-installation)
* [Documentation (Nix Expression Language chapter)](https://nixos.org/nix/manual/#ch-expression-language)
* [Manual (How to write packages for Nix)](https://nixos.org/nixpkgs/manual/)
* [Manual (NixOS)](https://nixos.org/nixos/manual/)
* [Community maintained wiki](https://nixos.wiki/)
* [Nix Wiki](https://nixos.org/wiki/)
* [Continuous package builds for unstable/master](https://hydra.nixos.org/jobset/nixos/trunk-combined)
* [Continuous package builds for 18.09 release](https://hydra.nixos.org/jobset/nixos/release-18.09)
* [Continuous package builds for 15.09 release](https://hydra.nixos.org/jobset/nixos/release-15.09)
* [Tests for unstable/master](https://hydra.nixos.org/job/nixos/trunk-combined/tested#tabs-constituents)
* [Tests for 18.09 release](https://hydra.nixos.org/job/nixos/release-18.09/tested#tabs-constituents)
* [Tests for 15.09 release](https://hydra.nixos.org/job/nixos/release-15.09/tested#tabs-constituents)
Communication:
* [Discourse Forum](https://discourse.nixos.org/)
* [Mailing list](http://lists.science.uu.nl/mailman/listinfo/nix-dev)
* [IRC - #nixos on freenode.net](irc://irc.freenode.net/#nixos)


@@ -2,27 +2,8 @@ let requiredVersion = import ./lib/minver.nix; in
if ! builtins ? nixVersion || builtins.compareVersions requiredVersion builtins.nixVersion == 1 then
abort ''
This version of Nixpkgs requires Nix >= ${requiredVersion}, please upgrade:
- If you are running NixOS, `nixos-rebuild' can be used to upgrade your system.
- Alternatively, with Nix > 2.0 `nix upgrade-nix' can be used to imperatively
upgrade Nix. You may use `nix-env --version' to check which version you have.
- If you installed Nix using the install script (https://nixos.org/nix/install),
it is safe to upgrade by running it again:
curl https://nixos.org/nix/install | sh
For more information, please see the NixOS release notes at
https://nixos.org/nixos/manual or locally at
${toString ./doc/manual/release-notes}.
If you need further help, see https://nixos.org/nixos/support.html
''
abort "This version of Nixpkgs requires Nix >= ${requiredVersion}, please upgrade! See https://nixos.org/wiki/How_to_update_when_Nix_is_too_old_to_evaluate_Nixpkgs"
else
import ./pkgs/top-level/impure.nix
import ./pkgs/top-level/all-packages.nix

doc/.gitignore

@@ -1,7 +0,0 @@
*.chapter.xml
*.section.xml
.version
out
manual-full.xml
highlightjs
functions/library/locations.xml


@@ -1,104 +0,0 @@
MD_TARGETS=$(addsuffix .xml, $(basename $(wildcard ./*.md ./**/*.md)))
.PHONY: all
all: validate format out/html/index.html out/epub/manual.epub
.PHONY: debug
debug:
nix-shell --run "xmloscopy --docbook5 ./manual.xml ./manual-full.xml"
.PHONY: format
format:
find . -iname '*.xml' -type f -print0 | xargs -0 -I{} -n1 \
xmlformat --config-file "$$XMLFORMAT_CONFIG" -i {}
.PHONY: fix-misc-xml
fix-misc-xml:
find . -iname '*.xml' -type f \
-exec ../nixos/doc/varlistentry-fixer.rb {} ';'
.PHONY: clean
clean:
rm -f ${MD_TARGETS} .version manual-full.xml functions/library/locations.xml
rm -rf ./out/ ./highlightjs
.PHONY: validate
validate: manual-full.xml
jing "$$RNG" manual-full.xml
out/html/index.html: manual-full.xml style.css highlightjs
mkdir -p out/html
xsltproc ${xsltFlags} \
--nonet --xinclude \
--output $@ \
"$$XSL/docbook/xhtml/docbook.xsl" \
./manual-full.xml
mkdir -p out/html/highlightjs/
cp -r highlightjs out/html/
cp ./overrides.css out/html/
cp ./style.css out/html/style.css
mkdir -p out/html/images/callouts
cp "$$XSL/docbook/images/callouts/"*.svg out/html/images/callouts/
chmod u+w -R out/html/
out/epub/manual.epub: manual-full.xml
mkdir -p out/epub/scratch
xsltproc ${xsltFlags} --nonet \
--output out/epub/scratch/ \
"$$XSL/docbook/epub/docbook.xsl" \
./manual-full.xml
cp ./overrides.css out/epub/scratch/OEBPS
cp ./style.css out/epub/scratch/OEBPS
mkdir -p out/epub/scratch/OEBPS/images/callouts/
cp "$$XSL/docbook/images/callouts/"*.svg out/epub/scratch/OEBPS/images/callouts/
echo "application/epub+zip" > mimetype
zip -0Xq "out/epub/manual.epub" mimetype
rm mimetype
cd "out/epub/scratch/" && zip -Xr9D "../manual.epub" *
rm -rf "out/epub/scratch/"
highlightjs:
mkdir -p highlightjs
cp -r "$$HIGHLIGHTJS/highlight.pack.js" highlightjs/
cp -r "$$HIGHLIGHTJS/LICENSE" highlightjs/
cp -r "$$HIGHLIGHTJS/mono-blue.css" highlightjs/
cp -r "$$HIGHLIGHTJS/loader.js" highlightjs/
manual-full.xml: ${MD_TARGETS} .version functions/library/locations.xml *.xml **/*.xml **/**/*.xml
xmllint --nonet --xinclude --noxincludenode manual.xml --output manual-full.xml
.version:
nix-instantiate --eval \
-E '(import ../lib).version' > .version
functions/library/locations.xml:
nix-build ./lib-function-locations.nix \
--out-link ./functions/library/locations.xml
%.section.xml: %.section.md
pandoc $^ -w docbook+smart \
-f markdown+smart \
| sed -e 's|<ulink url=|<link xlink:href=|' \
-e 's|</ulink>|</link>|' \
-e 's|<sect. id=|<section xml:id=|' \
-e 's|</sect[0-9]>|</section>|' \
-e '1s| id=| xml:id=|' \
-e '1s|\(<[^ ]* \)|\1xmlns="http://docbook.org/ns/docbook" xmlns:xlink="http://www.w3.org/1999/xlink" |' \
| cat > $@
%.chapter.xml: %.chapter.md
pandoc $^ -w docbook+smart \
--top-level-division=chapter \
-f markdown+smart \
| sed -e 's|<ulink url=|<link xlink:href=|' \
-e 's|</ulink>|</link>|' \
-e 's|<sect. id=|<section xml:id=|' \
-e 's|</sect[0-9]>|</section>|' \
-e '1s| id=| xml:id=|' \
-e '1s|\(<[^ ]* \)|\1|' \
| cat > $@

File diff suppressed because it is too large


@@ -1,294 +1,98 @@
<chapter xmlns="http://docbook.org/ns/docbook"
xmlns:xlink="http://www.w3.org/1999/xlink"
xml:id="chap-packageconfig">
<title>Global configuration</title>
<para>
Nix comes with certain defaults about what packages can and cannot be
installed, based on a package's metadata. By default, Nix will prevent
installation if any of the following criteria are true:
</para>
<itemizedlist>
<listitem>
<para>
The package is thought to be broken, and has had its
<literal>meta.broken</literal> set to <literal>true</literal>.
</para>
</listitem>
<listitem>
<para>
The package isn't intended to run on the given system, as none of its
<literal>meta.platforms</literal> match the given system.
</para>
</listitem>
<listitem>
<para>
The package's <literal>meta.license</literal> is set to a license which is
considered to be unfree.
</para>
</listitem>
<listitem>
<para>
The package has known security vulnerabilities but has not been or cannot be
updated for some reason, and a list of issues has been entered into the
package's <literal>meta.knownVulnerabilities</literal>.
</para>
</listitem>
</itemizedlist>
<para>
Note that all this is checked during evaluation already, and the check
includes any package that is evaluated. In particular, all build-time
dependencies are checked. <literal>nix-env -qa</literal> will (attempt to)
hide any packages that would be refused.
</para>
<para>
Each of these criteria can be altered in the nixpkgs configuration.
</para>
<para>
The nixpkgs configuration for a NixOS system is set in the
<literal>configuration.nix</literal>, as in the following example:
<programlisting>
{
nixpkgs.config = {
allowUnfree = true;
};
}
</programlisting>
However, this does not allow unfree software for individual users. Their
configurations are managed separately.
</para>
<para>
A user's nixpkgs configuration is stored in a user-specific configuration
file located at <filename>~/.config/nixpkgs/config.nix</filename>. For
example:
<title><filename>~/.nixpkgs/config.nix</filename>: global configuration</title>
<para>Nix packages can be configured to allow or deny certain options.</para>
<para>To apply the configuration edit
<filename>~/.nixpkgs/config.nix</filename> and set it like
<programlisting>
{
allowUnfree = true;
}
</programlisting>
</para>
<para>
Note that we are not able to test or build unfree software on Hydra due to
policy. Most unfree licenses prohibit us from either executing or
distributing the software.
</para>
<section xml:id="sec-allow-broken">
<title>Installing broken packages</title>
<para>
There are two ways to try compiling a package which has been marked as
broken.
</para>
and will allow the Nix package manager to install unfree licensed packages.</para>
<para>The configuration as listed also applies to NixOS under
<option>nixpkgs.config</option> set.</para>
<itemizedlist>
<listitem>
<para>Allow installing of packages that are distributed under
unfree license by setting <programlisting>allowUnfree =
true;</programlisting> or deny them by setting it to
<literal>false</literal>.</para>
<para>Same can be achieved by setting the environment variable:
<itemizedlist>
<listitem>
<para>
For allowing the build of a broken package once, you can use an
environment variable for a single invocation of the nix tools:
<programlisting>$ export NIXPKGS_ALLOW_BROKEN=1</programlisting>
</para>
</listitem>
<listitem>
<para>
For permanently allowing broken packages to be built, you may add
<literal>allowBroken = true;</literal> to your user's configuration file,
like this:
<programlisting>
{
allowBroken = true;
}
$ export NIXPKGS_ALLOW_UNFREE=1
</programlisting>
</para>
</listitem>
<listitem>
<para>Whenever unfree packages are not allowed, single packages
can still be allowed by a predicate function that accepts package
as an argument and should return a boolean:
<programlisting>
allowUnfreePredicate = (pkg: ...);
</programlisting>
Example to allow flash player only:
<programlisting>
allowUnfreePredicate = (pkg: pkgs.lib.hasPrefix "flashplayer-" pkg.name);
</programlisting>
</para>
</listitem>
<listitem>
<para>Whenever unfree packages are not allowed, packages can still
be whitelisted by their license:
<programlisting>
whitelistedLicenses = with stdenv.lib.licenses; [ amd wtfpl ];
</programlisting>
</para>
</listitem>
</itemizedlist>
</section>
<section xml:id="sec-allow-unsupported-system">
<title>Installing packages on unsupported systems</title>
</listitem>
<para>
There are also two ways to try compiling a package which has been marked as
unsupported for the given system.
</para>
<listitem>
<para>In addition to whitelisting licenses which are denied by the
<literal>allowUnfree</literal> setting, you can also explicitly
deny installation of packages which have a certain license:
<itemizedlist>
<listitem>
<para>
For allowing the build of an unsupported package once, you can use an
environment variable for a single invocation of the nix tools:
<programlisting>$ export NIXPKGS_ALLOW_UNSUPPORTED_SYSTEM=1</programlisting>
</para>
</listitem>
<listitem>
<para>
For permanently allowing unsupported packages to be built, you may add
<literal>allowUnsupportedSystem = true;</literal> to your user's
configuration file, like this:
<programlisting>
{
allowUnsupportedSystem = true;
}
blacklistedLicenses = with stdenv.lib.licenses; [ agpl3 gpl3 ];
</programlisting>
</para>
</listitem>
</itemizedlist>
</listitem>
<para>
The difference between a package being unsupported on some system and
being broken is admittedly a bit fuzzy. If a program
<emphasis>ought</emphasis> to work on a certain platform, but doesn't, the
platform should be included in <literal>meta.platforms</literal>, but marked
as broken with e.g. <literal>meta.broken =
!hostPlatform.isWindows</literal>. Of course, this begs the question of what
"ought" means exactly. That is left to the package maintainer.
</para>
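<para>
As a rough illustration of that convention (a sketch only; the attribute
values are hypothetical), a package that ought to work on all Unix platforms
but currently fails to build on Darwin might declare:
<programlisting>
meta = {
  platforms = stdenv.lib.platforms.unix;
  # the package ought to build here, but currently does not:
  broken = stdenv.hostPlatform.isDarwin;
};
</programlisting>
</para>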
</section>
<section xml:id="sec-allow-unfree">
<title>Installing unfree packages</title>
</itemizedlist>
<para>
There are several ways to tweak how Nix handles a package which has been
marked as unfree.
</para>
<para>A complete list of licenses can be found in the file
<filename>lib/licenses.nix</filename> of the nix package tree.</para>
<itemizedlist>
<listitem>
<para>
To temporarily allow all unfree packages, you can use an environment
variable for a single invocation of the nix tools:
<programlisting>$ export NIXPKGS_ALLOW_UNFREE=1</programlisting>
</para>
</listitem>
<listitem>
<para>
It is possible to permanently allow individual unfree packages, while
still blocking unfree packages by default using the
<literal>allowUnfreePredicate</literal> configuration option in the user
configuration file.
</para>
<para>
This option is a function which accepts a package as a parameter, and
returns a boolean. The following example configuration accepts a package
and always returns false:
<programlisting>
{
allowUnfreePredicate = (pkg: false);
}
</programlisting>
</para>
<para>
As a more useful example, the following configuration only allows
Flash Player and Visual Studio Code:
<programlisting>
{
allowUnfreePredicate = (pkg: builtins.elem (builtins.parseDrvName pkg.name).name [ "flashplayer" "vscode" ]);
}
</programlisting>
</para>
</listitem>
<listitem>
<para>
It is also possible to whitelist and blacklist licenses that are
specifically acceptable or not acceptable, using
<literal>whitelistedLicenses</literal> and
<literal>blacklistedLicenses</literal>, respectively.
</para>
<para>
The following example configuration whitelists the licenses
<literal>amd</literal> and <literal>wtfpl</literal>:
<programlisting>
{
whitelistedLicenses = with stdenv.lib.licenses; [ amd wtfpl ];
}
</programlisting>
</para>
<para>
The following example configuration blacklists the <literal>gpl3</literal>
and <literal>agpl3</literal> licenses:
<programlisting>
{
blacklistedLicenses = with stdenv.lib.licenses; [ agpl3 gpl3 ];
}
</programlisting>
</para>
</listitem>
</itemizedlist>
<para>
A complete list of licenses can be found in the file
<filename>lib/licenses.nix</filename> of the nixpkgs tree.
</para>
</section>
<section xml:id="sec-allow-insecure">
<title>Installing insecure packages</title>
<para>
There are several ways to tweak how Nix handles a package which has been
marked as insecure.
</para>
<itemizedlist>
<listitem>
<para>
To temporarily allow all insecure packages, you can use an environment
variable for a single invocation of the nix tools:
<programlisting>$ export NIXPKGS_ALLOW_INSECURE=1</programlisting>
</para>
</listitem>
<listitem>
<para>
It is possible to permanently allow individual insecure packages, while
still blocking other insecure packages by default using the
<literal>permittedInsecurePackages</literal> configuration option in the
user configuration file.
</para>
<para>
The following example configuration permits the installation of the
hypothetically insecure package <literal>hello</literal>, version
<literal>1.2.3</literal>:
<programlisting>
{
permittedInsecurePackages = [
"hello-1.2.3"
];
}
</programlisting>
</para>
</listitem>
<listitem>
<para>
It is also possible to create a custom policy around which insecure
packages to allow and deny, by overriding the
<literal>allowInsecurePredicate</literal> configuration option.
</para>
<para>
The <literal>allowInsecurePredicate</literal> option is a function which
accepts a package and returns a boolean, much like
<literal>allowUnfreePredicate</literal>.
</para>
<para>
The following configuration example only allows insecure packages with
very short names:
<programlisting>
{
allowInsecurePredicate = (pkg: (builtins.stringLength (builtins.parseDrvName pkg.name).name) &lt;= 5);
}
</programlisting>
</para>
<para>
Note that <literal>permittedInsecurePackages</literal> is only checked if
<literal>allowInsecurePredicate</literal> is not specified.
</para>
</listitem>
</itemizedlist>
</section>
<!--============================================================-->
<section xml:id="sec-modify-via-packageOverrides">
<title>Modify packages via <literal>packageOverrides</literal></title>
<para>
You can define a function called <varname>packageOverrides</varname> in your
local <filename>~/.config/nixpkgs/config.nix</filename> to override nix
packages. It must be a function that takes pkgs as an argument and returns a
modified set of packages.
<section xml:id="sec-modify-via-packageOverrides"><title>Modify
packages via <literal>packageOverrides</literal></title>
<para>You can define a function called
<varname>packageOverrides</varname> in your local
<filename>~/.nixpkgs/config.nix</filename> to override nix packages. It
must be a function that takes pkgs as an argument and returns a modified
set of packages.
<programlisting>
{
packageOverrides = pkgs: rec {
@@ -296,214 +100,10 @@
};
}
</programlisting>
</para>
</section>
<section xml:id="sec-declarative-package-management">
<title>Declarative Package Management</title>
<section xml:id="sec-building-environment">
<title>Build an environment</title>
</para>
<para>
Using <literal>packageOverrides</literal>, it is possible to manage
packages declaratively. This means that we can list all of our desired
packages within a declarative Nix expression. For example, to have
<literal>aspell</literal>, <literal>bc</literal>,
<literal>ffmpeg</literal>, <literal>coreutils</literal>,
<literal>gdb</literal>, <literal>nixUnstable</literal>,
<literal>emscripten</literal>, <literal>jq</literal>,
<literal>nox</literal>, and <literal>silver-searcher</literal>, we could
use the following in <filename>~/.config/nixpkgs/config.nix</filename>:
</para>
</section>
<screen>
{
packageOverrides = pkgs: with pkgs; {
myPackages = pkgs.buildEnv {
name = "my-packages";
paths = [ aspell bc coreutils gdb ffmpeg nixUnstable emscripten jq nox silver-searcher ];
};
};
}
</screen>
<para>
To install it into our environment, you can just run <literal>nix-env -iA
nixpkgs.myPackages</literal>. If you want to load the packages to be built
from a working copy of <literal>nixpkgs</literal> you just run
<literal>nix-env -f. -iA myPackages</literal>. To explore what's been
installed, just look through <filename>~/.nix-profile/</filename>. You can
see that a lot of stuff has been installed. Some of this stuff is useful
some of it isn't. Let's tell Nixpkgs to only link the stuff that we want:
</para>
<screen>
{
packageOverrides = pkgs: with pkgs; {
myPackages = pkgs.buildEnv {
name = "my-packages";
paths = [ aspell bc coreutils gdb ffmpeg nixUnstable emscripten jq nox silver-searcher ];
pathsToLink = [ "/share" "/bin" ];
};
};
}
</screen>
<para>
<literal>pathsToLink</literal> tells Nixpkgs to only link the paths listed
which gets rid of the extra stuff in the profile. <filename>/bin</filename>
and <filename>/share</filename> are good defaults for a user environment,
getting rid of the clutter. If you are running Nix on macOS, you may
want to add another path as well, <filename>/Applications</filename>, that
makes GUI apps available.
</para>
</section>
<section xml:id="sec-getting-documentation">
<title>Getting documentation</title>
<para>
After building that new environment, look through
<filename>~/.nix-profile</filename> to make sure everything is there that
we wanted. Discerning readers will note that some files are missing. Look
inside <filename>~/.nix-profile/share/man/man1/</filename> to verify this.
There are no man pages for any of the Nix tools! This is because some
packages like Nix have multiple outputs for things like documentation (see
section 4). Let's make Nix install those as well.
</para>
<screen>
{
packageOverrides = pkgs: with pkgs; {
myPackages = pkgs.buildEnv {
name = "my-packages";
paths = [ aspell bc coreutils ffmpeg nixUnstable emscripten jq nox silver-searcher ];
pathsToLink = [ "/share/man" "/share/doc" "/bin" ];
extraOutputsToInstall = [ "man" "doc" ];
};
};
}
</screen>
<para>
This provides us with some useful documentation for using our packages.
However, if we actually want those manpages to be detected by man, we need
to set up our environment. This can also be managed within Nix expressions.
</para>
<screen>
{
packageOverrides = pkgs: with pkgs; rec {
myProfile = writeText "my-profile" ''
export PATH=$HOME/.nix-profile/bin:/nix/var/nix/profiles/default/bin:/sbin:/bin:/usr/sbin:/usr/bin
export MANPATH=$HOME/.nix-profile/share/man:/nix/var/nix/profiles/default/share/man:/usr/share/man
'';
myPackages = pkgs.buildEnv {
name = "my-packages";
paths = [
(runCommand "profile" {} ''
mkdir -p $out/etc/profile.d
cp ${myProfile} $out/etc/profile.d/my-profile.sh
'')
aspell
bc
coreutils
ffmpeg
man
nixUnstable
emscripten
jq
nox
silver-searcher
];
pathsToLink = [ "/share/man" "/share/doc" "/bin" "/etc" ];
extraOutputsToInstall = [ "man" "doc" ];
};
};
}
</screen>
<para>
For this to work fully, you must also have this script sourced when you are
logged in. Try adding something like this to your
<filename>~/.profile</filename> file:
</para>
<screen>
#!/bin/sh
if [ -d $HOME/.nix-profile/etc/profile.d ]; then
for i in $HOME/.nix-profile/etc/profile.d/*.sh; do
if [ -r $i ]; then
. $i
fi
done
fi
</screen>
<para>
Now just run <literal>source $HOME/.profile</literal> and you can start
loading man pages from your environment.
</para>
</section>
<section xml:id="sec-gnu-info-setup">
<title>GNU info setup</title>
<para>
Configuring GNU info is a little bit trickier than man pages. To work
correctly, info needs a database to be generated. This can be done with
some small modifications to our environment scripts.
</para>
<screen>
{
packageOverrides = pkgs: with pkgs; rec {
myProfile = writeText "my-profile" ''
export PATH=$HOME/.nix-profile/bin:/nix/var/nix/profiles/default/bin:/sbin:/bin:/usr/sbin:/usr/bin
export MANPATH=$HOME/.nix-profile/share/man:/nix/var/nix/profiles/default/share/man:/usr/share/man
export INFOPATH=$HOME/.nix-profile/share/info:/nix/var/nix/profiles/default/share/info:/usr/share/info
'';
myPackages = pkgs.buildEnv {
name = "my-packages";
paths = [
(runCommand "profile" {} ''
mkdir -p $out/etc/profile.d
cp ${myProfile} $out/etc/profile.d/my-profile.sh
'')
aspell
bc
coreutils
ffmpeg
man
nixUnstable
emscripten
jq
nox
silver-searcher
texinfoInteractive
];
pathsToLink = [ "/share/man" "/share/doc" "/share/info" "/bin" "/etc" ];
extraOutputsToInstall = [ "man" "doc" "info" ];
postBuild = ''
if [ -x $out/bin/install-info -a -w $out/share/info ]; then
shopt -s nullglob
for i in $out/share/info/*.info $out/share/info/*.info.gz; do
$out/bin/install-info $i $out/share/info/dir
done
fi
'';
};
};
}
</screen>
<para>
<literal>postBuild</literal> tells Nixpkgs to run a command after building
the environment. In this case, <literal>install-info</literal> adds the
installed info pages to <literal>dir</literal> which is GNU info's default
root node. Note that <literal>texinfoInteractive</literal> is added to the
environment to give the <literal>install-info</literal> command.
</para>
</section>
</section>
</chapter>


@@ -1,35 +1,20 @@
<chapter xmlns="http://docbook.org/ns/docbook"
xmlns:xlink="http://www.w3.org/1999/xlink"
xml:id="chap-contributing">
<title>Contributing to this documentation</title>
<para>
The DocBook sources of the Nixpkgs manual are in the
<filename
<title>Contributing to this documentation</title>
<para>The DocBook sources of the Nixpkgs manual are in the <filename
xlink:href="https://github.com/NixOS/nixpkgs/tree/master/doc">doc</filename>
subdirectory of the Nixpkgs repository.
</para>
<para>
You can quickly check your edits with <command>make</command>:
</para>
subdirectory of the Nixpkgs repository. If you make modifications to
the manual, it's important to build it before committing. You can do that as follows:
<screen>
$ cd /path/to/nixpkgs/doc
$ nix-shell
[nix-shell]$ make
$ cd /path/to/nixpkgs
$ nix-build doc
</screen>
<para>
If you experience problems, run <command>make debug</command> to help
understand the docbook errors.
</para>
<para>
After making modifications to the manual, it's important to build it before
committing. You can do that as follows:
<screen>
$ cd /path/to/nixpkgs/doc
$ nix-shell
[nix-shell]$ make clean
[nix-shell]$ nix-build .
</screen>
If the build succeeds, the manual will be in
<filename>./result/share/doc/nixpkgs/manual.html</filename>.
</para>
If the build succeeds, the manual will be in
<filename>./result/share/doc/nixpkgs/manual.html</filename>.</para>
</chapter>


@@ -1,475 +0,0 @@
<chapter xmlns="http://docbook.org/ns/docbook"
xmlns:xlink="http://www.w3.org/1999/xlink"
xml:id="chap-cross">
<title>Cross-compilation</title>
<section xml:id="sec-cross-intro">
<title>Introduction</title>
<para>
"Cross-compilation" means compiling a program on one machine for another
type of machine. For example, a typical use of cross compilation is to
compile programs for embedded devices. These devices often don't have the
computing power and memory to compile their own programs. One might think
that cross-compilation is a fairly niche concern, but there are advantages
to being rigorous about distinguishing build-time vs run-time environments
even when one is developing and deploying on the same machine. Nixpkgs is
increasingly adopting the opinion that packages should be written with
cross-compilation in mind, and nixpkgs should evaluate in a similar way (by
minimizing cross-compilation-specific special cases) whether or not one is
cross-compiling.
</para>
<para>
This chapter will be organized in three parts. First, it will describe the
basics of how to package software in a way that supports cross-compilation.
Second, it will describe how to use Nixpkgs when cross-compiling. Third, it
will describe the internal infrastructure supporting cross-compilation.
</para>
</section>
<!--============================================================-->
<section xml:id="sec-cross-packaging">
<title>Packaging in a cross-friendly manner</title>
<section xml:id="sec-cross-platform-parameters">
<title>Platform parameters</title>
<para>
Nixpkgs follows the
<link xlink:href="https://gcc.gnu.org/onlinedocs/gccint/Configure-Terms.html">common
historical convention of GNU autoconf</link> of distinguishing between 3
types of platform: <wordasword>build</wordasword>,
<wordasword>host</wordasword>, and <wordasword>target</wordasword>. In
summary, <wordasword>build</wordasword> is the platform on which a package
is being built, <wordasword>host</wordasword> is the platform on which it
is to run. The third attribute, <wordasword>target</wordasword>, is
relevant only for certain specific compilers and build tools.
</para>
<para>
In Nixpkgs, these three platforms are defined as attribute sets under the
names <literal>buildPlatform</literal>, <literal>hostPlatform</literal>,
and <literal>targetPlatform</literal>. They are always defined as
attributes in the standard environment. That means one can access them
like:
<programlisting>{ stdenv, fooDep, barDep, ... }: ...stdenv.buildPlatform...</programlisting>
.
</para>
<variablelist>
<varlistentry>
<term>
<varname>buildPlatform</varname>
</term>
<listitem>
<para>
The "build platform" is the platform on which a package is built. Once
someone has a built package, or pre-built binary package, the build
platform should not matter and can safely be ignored.
</para>
</listitem>
</varlistentry>
<varlistentry>
<term>
<varname>hostPlatform</varname>
</term>
<listitem>
<para>
The "host platform" is the platform on which a package will be run. This
is the simplest platform to understand, but also the one with the worst
name.
</para>
</listitem>
</varlistentry>
<varlistentry>
<term>
<varname>targetPlatform</varname>
</term>
<listitem>
<para>
The "target platform" attribute is, unlike the other two attributes, not
actually fundamental to the process of building software. Instead, it is
only relevant for compatibility with building certain specific compilers
and build tools. It can be safely ignored for all other packages.
</para>
<para>
The build process of certain compilers is written in such a way that the
compiler resulting from a single build can itself only produce binaries
for a single platform. The task specifying this single "target platform"
is thus pushed to build time of the compiler. The root cause of this
mistake is often that the compiler (which will be run on the host) and
the standard library/runtime (which will be run on the target) are
built by a single build process.
</para>
<para>
There is no fundamental need to think about a single target ahead of
time like this. If the tool supports modular or pluggable backends, both
the need to specify the target at build time and the constraint of
having only a single target disappear. An example of such a tool is
LLVM.
</para>
<para>
Although the existence of a "target platform" is arguably a historical
mistake, it is a common one: examples of tools that suffer from it are
GCC, Binutils, GHC and Autoconf. Nixpkgs tries to avoid sharing in the
mistake where possible. Still, because the concept of a target platform
is so ingrained, it is best to support it as is.
</para>
</listitem>
</varlistentry>
</variablelist>
<para>
The exact schema these fields follow is a bit ill-defined due to a long and
convoluted evolution, but this is slowly being cleaned up. You can see
examples of ones used in practice in
<literal>lib.systems.examples</literal>; note how they are not all very
consistent. For now, here are a few fields you can count on them containing:
</para>
<variablelist>
<varlistentry>
<term>
<varname>system</varname>
</term>
<listitem>
<para>
This is a two-component shorthand for the platform. Examples of this
would be "x86_64-darwin" and "i686-linux"; see
<literal>lib.systems.doubles</literal> for more. This format isn't very
standard, but has built-in support in Nix, such as the
<varname>builtins.currentSystem</varname> impure string.
</para>
</listitem>
</varlistentry>
<varlistentry>
<term>
<varname>config</varname>
</term>
<listitem>
<para>
This is a 3- or 4- component shorthand for the platform. Examples of
this would be "x86_64-unknown-linux-gnu" and "aarch64-apple-darwin14".
This is a standard format called the "LLVM target triple", as they are
pioneered by LLVM and traditionally just used for the
<varname>targetPlatform</varname>. This format is strictly more
informative than the "Nix host double", as the previous format could
analogously be termed. This needs a better name than
<varname>config</varname>!
</para>
</listitem>
</varlistentry>
<varlistentry>
<term>
<varname>parsed</varname>
</term>
<listitem>
<para>
This is a nix representation of a parsed LLVM target triple with
white-listed components. This can be specified directly, or actually
parsed from the <varname>config</varname>. [Technically, only one need
be specified and the others can be inferred, though the precision of
inference may not be very good.] See
<literal>lib.systems.parse</literal> for the exact representation.
</para>
</listitem>
</varlistentry>
<varlistentry>
<term>
<varname>libc</varname>
</term>
<listitem>
<para>
This is a string identifying the standard C library used. Valid
identifiers include "glibc" for GNU libc, "libSystem" for Darwin's
Libsystem, and "uclibc" for µClibc. It should probably be refactored to
use the module system, like <varname>parse</varname>.
</para>
</listitem>
</varlistentry>
<varlistentry>
<term>
<varname>is*</varname>
</term>
<listitem>
<para>
These predicates are defined in <literal>lib.systems.inspect</literal>,
and slapped on every platform. They are superior to the ones in
<varname>stdenv</varname> as they force the user to be explicit about
which platform they are inspecting. Please use these instead of those.
</para>
</listitem>
</varlistentry>
<varlistentry>
<term>
<varname>platform</varname>
</term>
<listitem>
<para>
This is, quite frankly, a dumping ground of ad-hoc settings (it's an
attribute set). See <literal>lib.systems.platforms</literal> for
examples—there's hopefully one in there that will work verbatim for
each platform that is working. Please help us triage these flags and
give them better homes!
</para>
</listitem>
</varlistentry>
</variablelist>
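<para>
As a rough illustration (the exact values depend on the machine doing the
evaluation), these fields can be inspected from the command line:
<programlisting>
$ nix-instantiate --eval -E '(import &lt;nixpkgs&gt; {}).stdenv.hostPlatform.system'
"x86_64-linux"
$ nix-instantiate --eval -E '(import &lt;nixpkgs&gt; {}).stdenv.hostPlatform.config'
"x86_64-unknown-linux-gnu"
$ nix-instantiate --eval -E '(import &lt;nixpkgs&gt; {}).stdenv.hostPlatform.isLinux'
true
</programlisting>
</para>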
</section>
<section xml:id="sec-cross-specifying-dependencies">
<title>Specifying Dependencies</title>
<para>
In this section we explore the relationship between both runtime and
build-time dependencies and the 3 Autoconf platforms.
</para>
<para>
A runtime dependency between 2 packages implies that between them both the
host and target platforms match. This is directly implied by the meaning of
"host platform" and "runtime dependency": The package dependency exists
while both packages are running on a single host platform.
</para>
<para>
A build time dependency, however, implies a shift in platforms between the
depending package and the depended-on package. The meaning of a build time
dependency is that to build the depending package we need to be able to run
the depended-on package. The depending package's build platform is
therefore equal to the depended-on package's host platform. Analogously,
the depending package's host platform is equal to the depended-on package's
target platform.
</para>
<para>
In this manner, given the 3 platforms for one package, we can determine the
three platforms for all its transitive dependencies. This is the most
important guiding principle behind cross-compilation with Nixpkgs, and will
be called the <wordasword>sliding window principle</wordasword>.
</para>
<para>
Some examples will probably make this clearer. If a package is being built
with a <literal>(build, host, target)</literal> platform triple of
<literal>(foo, bar, bar)</literal>, then its build-time dependencies would
have a triple of <literal>(foo, foo, bar)</literal>, and <emphasis>those
packages'</emphasis> build-time dependencies would have a triple of
<literal>(foo, foo, foo)</literal>. In other words, it should take two
"rounds" of following build-time dependency edges before one reaches a
fixed point where, by the sliding window principle, the platform triple no
longer changes. Indeed, this happens with cross compilation, where only
rounds of native dependencies starting with the second necessarily coincide
with native packages.
</para>
<note>
<para>
The depending package's target platform is unconstrained by the sliding
window principle, which makes sense in that one can in principle build
cross compilers targeting arbitrary platforms.
</para>
</note>
<para>
How does this work in practice? Nixpkgs is now structured so that
build-time dependencies are taken from <varname>buildPackages</varname>,
whereas run-time dependencies are taken from the top level attribute set.
For example, <varname>buildPackages.gcc</varname> should be used at build
time, while <varname>gcc</varname> should be used at run time. Now, for
most of Nixpkgs's history, there was no <varname>buildPackages</varname>,
and most packages have not been refactored to use it explicitly. Instead,
one can use the six (<emphasis>gasp</emphasis>) attributes used for
specifying dependencies as documented in
<xref linkend="ssec-stdenv-dependencies"/>. We "splice" together the
run-time and build-time package sets with <varname>callPackage</varname>,
and then <varname>mkDerivation</varname> for each of four attributes pulls
the right derivation out. This splicing can be skipped when not cross
compiling as the package sets are the same, but is a bit slow for cross
compiling. Because of this, a best-of-both-worlds solution is in the works
with no splicing or explicit access of <varname>buildPackages</varname>
needed. For now, feel free to use either method.
</para>
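<para>
As a hedged sketch of what this looks like in a package expression (the
package name and the particular dependencies are placeholders), build-time
tools go into <varname>nativeBuildInputs</varname>, run-time libraries go
into <varname>buildInputs</varname>, and <varname>buildPackages</varname>
gives explicit access to the build platform's package set:
<programlisting>
{ stdenv, buildPackages, pkgconfig, openssl }:

stdenv.mkDerivation {
  name = "example-1.0";
  src = ./.;
  # build-time tool; runs on the build platform during the build:
  nativeBuildInputs = [ pkgconfig ];
  # run-time dependency; built for and runs on the host platform:
  buildInputs = [ openssl ];
  # a compiler that must itself run on the build platform:
  depsBuildBuild = [ buildPackages.stdenv.cc ];
}
</programlisting>
</para>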
<note>
<para>
There is also a "backlink" <varname>targetPackages</varname>, yielding a
package set whose <varname>buildPackages</varname> is the current package
set. This is a hack, though, to accommodate compilers with lousy build
systems. Please do not use this unless you are absolutely sure you are
packaging such a compiler and there is no other way.
</para>
</note>
</section>
<section xml:id="sec-cross-cookbook">
<title>Cross packaging cookbook</title>
<para>
Some frequent problems when packaging for cross-compilation are worth spelling
out and answering up front. Ideally the information above is exhaustive, so this
section cannot provide any new information, but it's ludicrous and cruel to
expect everyone to spend effort working through the interaction of many
features just to figure out the same answer to the same common problem.
Feel free to add to this list!
</para>
<qandaset>
<qandaentry xml:id="cross-qa-build-c-program-in-build-environment">
<question>
<para>
What if my package's build system needs to build a C program to be run
under the build environment?
</para>
</question>
<answer>
<para>
<programlisting>depsBuildBuild = [ buildPackages.stdenv.cc ];</programlisting>
Add it to your <function>mkDerivation</function> invocation.
</para>
</answer>
</qandaentry>
<qandaentry xml:id="cross-qa-fails-to-find-ar">
<question>
<para>
My package fails to find <command>ar</command>.
</para>
</question>
<answer>
<para>
Many packages assume that an unprefixed <command>ar</command> is
available, but Nix doesn't provide one. It only provides a prefixed one,
just as it only does for all the other binutils programs. It may be
necessary to patch the package to fix the build system to use a prefixed
<command>ar</command>.
</para>
</answer>
</qandaentry>
<qandaentry xml:id="cross-testsuite-runs-host-code">
<question>
<para>
My package's testsuite needs to run host platform code.
</para>
</question>
<answer>
<para>
<programlisting>doCheck = stdenv.hostPlatform == stdenv.buildPlatform;</programlisting>
Add it to your <function>mkDerivation</function> invocation.
</para>
</answer>
</qandaentry>
</qandaset>
</section>
</section>
<!--============================================================-->
<section xml:id="sec-cross-usage">
<title>Cross-building packages</title>
<note>
<para>
More information needs to be moved from the old wiki, especially
<link xlink:href="https://nixos.org/wiki/CrossCompiling" />, for this
section.
</para>
</note>
<para>
Nixpkgs can be instantiated with <varname>localSystem</varname> alone, in
which case there is no cross compiling and everything is built by and for
that system, or also with <varname>crossSystem</varname>, in which case
packages run on the latter, but all building happens on the former. Both
parameters take the same schema as the 3 (build, host, and target) platforms
defined in the previous section. As mentioned above,
<literal>lib.systems.examples</literal> has some platforms which are used as
arguments for these parameters in practice. You can use them
programmatically, or on the command line:
<programlisting>
nix-build &lt;nixpkgs&gt; --arg crossSystem '(import &lt;nixpkgs/lib&gt;).systems.examples.fooBarBaz' -A whatever</programlisting>
</para>
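<para>
The programmatic equivalent is a sketch along these lines
(<literal>aarch64-multiplatform</literal> is just one of the entries in
<literal>lib.systems.examples</literal>; substitute the platform you are
actually targeting):
<programlisting>
let
  pkgsCross = import &lt;nixpkgs&gt; {
    crossSystem = (import &lt;nixpkgs/lib&gt;).systems.examples.aarch64-multiplatform;
  };
in
  pkgsCross.hello
</programlisting>
</para>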
<note>
<para>
Eventually we would like to make these platform examples an unnecessary
convenience so that
<programlisting>
nix-build &lt;nixpkgs&gt; --arg crossSystem.config '&lt;arch&gt;-&lt;os&gt;-&lt;vendor&gt;-&lt;abi&gt;' -A whatever</programlisting>
works in the vast majority of cases. The problem today is dependencies on
other sorts of configuration which aren't given proper defaults. We rely on
the examples to crudely set those configuration parameters in some
vaguely sane manner on the user's behalf. Issue
<link xlink:href="https://github.com/NixOS/nixpkgs/issues/34274">#34274</link>
tracks this inconvenience along with its root cause in crufty configuration
options.
</para>
</note>
<para>
While one is free to pass both parameters in full, there's a lot of logic to
fill in missing fields. As discussed in the previous section, only one of
<varname>system</varname>, <varname>config</varname>, and
<varname>parsed</varname> is needed to infer the other two. Additionally,
<varname>libc</varname> will be inferred from <varname>parsed</varname>.
Finally, <literal>localSystem.system</literal> is also
<emphasis>impurely</emphasis> inferred based on the platform on which evaluation
occurs. This means it is often not necessary to pass
<varname>localSystem</varname> at all, as in the command-line example in the
previous paragraph.
</para>
<note>
<para>
Many sources (manual, wiki, etc) probably mention passing
<varname>system</varname>, <varname>platform</varname>, along with the
optional <varname>crossSystem</varname> to nixpkgs: <literal>import
&lt;nixpkgs&gt; { system = ..; platform = ..; crossSystem = ..;
}</literal>. Passing those two instead of <varname>localSystem</varname> is
still supported for compatibility, but is discouraged. Indeed, much of the
inference we do for these parameters is motivated by compatibility as much
as convenience.
</para>
</note>
<para>
One would think that <varname>localSystem</varname> and
<varname>crossSystem</varname> overlap horribly with the three
<varname>*Platforms</varname> (<varname>buildPlatform</varname>,
<varname>hostPlatform,</varname> and <varname>targetPlatform</varname>; see
<varname>stage.nix</varname> or the manual). Actually, those identifiers are
purposefully not used here to draw a subtle but important distinction: While
the granularity of having 3 platforms is necessary to properly *build*
packages, it is overkill for specifying the user's *intent* when making a
build plan or package set. A simple "build vs deploy" dichotomy is adequate:
the sliding window principle described in the previous section shows how to
interpolate between these two "end points" to get the 3 platform triple
for each bootstrapping stage. That means for any package in a given package
set, even those not bound on the top level but only reachable via
dependencies or <varname>buildPackages</varname>, the three platforms will
be defined as one of <varname>localSystem</varname> or
<varname>crossSystem</varname>, with the former replacing the latter as one
traverses build-time dependencies. A last simple difference then is
<varname>crossSystem</varname> should be null when one doesn't want to
cross-compile, while the <varname>*Platform</varname>s are always non-null.
<varname>localSystem</varname> is always non-null.
</para>
</section>
<!--============================================================-->
<section xml:id="sec-cross-infra">
<title>Cross-compilation infrastructure</title>
<para>
To be written.
</para>
<note>
<para>
If one explores nixpkgs, they will see derivations with names like
<literal>gccCross</literal>. Such <literal>*Cross</literal> derivations are
a holdover from before we properly distinguished between the host and
target platforms —the derivation with "Cross" in the name covered the
<literal>build = host != target</literal> case, while the other covered the
<literal>host = target</literal>, with build platform the same or not based
on whether one was using its <literal>.nativeDrv</literal> or
<literal>.crossDrv</literal>. This ugliness will disappear soon.
</para>
</note>
</section>
</chapter>


@@ -1,50 +1,85 @@
{ pkgs ? (import ./.. { }), nixpkgs ? { }}:
with import ./.. { };
with lib;
let
lib = pkgs.lib;
locationsXml = import ./lib-function-locations.nix { inherit pkgs nixpkgs; };
sources = sourceFilesBySuffices ./. [".xml"];
sources-langs = ./languages-frameworks;
in
pkgs.stdenv.mkDerivation {
stdenv.mkDerivation {
name = "nixpkgs-manual";
buildInputs = with pkgs; [ pandoc libxml2 libxslt zip jing xmlformat ];
src = ./.;
buildInputs = [ pandoc libxml2 libxslt ];
# Hacking on these variables? Make sure to close and open
# nix-shell between each test, maybe even:
# $ nix-shell --run "make clean all"
# otherwise they won't reapply :)
HIGHLIGHTJS = pkgs.documentation-highlighter;
XSL = "${pkgs.docbook_xsl_ns}/xml/xsl";
RNG = "${pkgs.docbook5}/xml/rng/docbook/docbook.rng";
XMLFORMAT_CONFIG = ../nixos/doc/xmlformat.conf;
xsltFlags = lib.concatStringsSep " " [
"--param section.autolabel 1"
"--param section.label.includes.component.label 1"
"--stringparam html.stylesheet 'style.css overrides.css highlightjs/mono-blue.css'"
"--stringparam html.script './highlightjs/highlight.pack.js ./highlightjs/loader.js'"
"--param xref.with.number.and.title 1"
"--param toc.section.depth 3"
"--stringparam admon.style ''"
"--stringparam callout.graphics.extension .svg"
];
postPatch = ''
rm -rf ./functions/library/locations.xml
ln -s ${locationsXml} ./functions/library/locations.xml
echo ${lib.version} > .version
xsltFlags = ''
--param section.autolabel 1
--param section.label.includes.component.label 1
--param html.stylesheet 'style.css'
--param xref.with.number.and.title 1
--param toc.section.depth 3
--param admon.style '''
--param callout.graphics.extension '.gif'
'';
installPhase = ''
dest="$out/share/doc/nixpkgs"
mkdir -p "$(dirname "$dest")"
mv out/html "$dest"
mv "$dest/index.html" "$dest/manual.html"
mv out/epub/manual.epub "$dest/nixpkgs-manual.epub"
buildCommand = let toDocbook = { useChapters ? false, inputFile, outputFile }:
let
extraHeader = ''xmlns="http://docbook.org/ns/docbook" xmlns:xlink="http://www.w3.org/1999/xlink" '';
in ''
{
pandoc '${inputFile}' -w docbook ${optionalString useChapters "--chapters"} \
| sed -e 's|<ulink url=|<link xlink:href=|' \
-e 's|</ulink>|</link>|' \
-e 's|<sect. id=|<section xml:id=|' \
-e 's|</sect[0-9]>|</section>|' \
-e '1s| id=| xml:id=|' \
-e '1s|\(<[^ ]* \)|\1${extraHeader}|'
} > '${outputFile}'
'';
in
mkdir -p $out/nix-support/
echo "doc manual $dest manual.html" >> $out/nix-support/hydra-build-products
echo "doc manual $dest nixpkgs-manual.epub" >> $out/nix-support/hydra-build-products
''
ln -s '${sources}/'*.xml .
mkdir ./languages-frameworks
cp -s '${sources-langs}'/* ./languages-frameworks
''
+ toDocbook {
inputFile = ./introduction.md;
outputFile = "introduction.xml";
useChapters = true;
}
+ toDocbook {
inputFile = ./haskell-users-guide.md;
outputFile = "haskell-users-guide.xml";
useChapters = true;
}
+ toDocbook {
inputFile = ./../pkgs/development/idris-modules/README.md;
outputFile = "languages-frameworks/idris.xml";
}
+ toDocbook {
inputFile = ./../pkgs/development/r-modules/README.md;
outputFile = "languages-frameworks/r.xml";
}
+ ''
echo ${nixpkgsVersion} > .version
# validate against relaxng schema
xmllint --nonet --xinclude --noxincludenode manual.xml --output manual-full.xml
${jing}/bin/jing ${docbook5}/xml/rng/docbook/docbook.rng manual-full.xml
dst=$out/share/doc/nixpkgs
mkdir -p $dst
xsltproc $xsltFlags --nonet --xinclude \
--output $dst/manual.html \
${docbook5_xsl}/xml/xsl/docbook/xhtml/docbook.xsl \
./manual.xml
cp ${./style.css} $dst/style.css
mkdir -p $dst/images/callouts
cp "${docbook5_xsl}/xml/xsl/docbook/images/callouts/"*.gif $dst/images/callouts/
mkdir -p $out/nix-support
echo "doc manual $dst manual.html" >> $out/nix-support/hydra-build-products
'';
}

doc/erlang-users-guide.xml (new file)

@@ -0,0 +1,305 @@
<chapter xmlns="http://docbook.org/ns/docbook"
xmlns:xlink="http://www.w3.org/1999/xlink"
xml:id="users-guide-to-the-erlang-infrastructure">
<title>User's Guide to the Erlang Infrastructure</title>
<section xml:id="build-tools">
<title>Build Tools</title>
<para>
By default, Rebar3 wants to manage its own dependencies. In the
normal, non-Nix world this is perfectly acceptable, but in the Nix world it
is not. To support this we have created two versions of rebar3,
<literal>rebar3</literal> and <literal>rebar3-open</literal>. The
<literal>rebar3</literal> version has been patched to remove the
ability to download anything. If you are not running it in a
nix-shell or a nix-build, then it's probably not going to work for
you. <literal>rebar3-open</literal> is the normal, unmodified
rebar3. It should work exactly as would any other version of
rebar3. Any Erlang package should rely on
<literal>rebar3</literal>, and that's really what you should be
using too.
</para>
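<para>
For a quick interactive check (just a sketch, not a required workflow), both
the Erlang runtime and the patched rebar3 can be pulled into a temporary
shell with <literal>nix-shell</literal>:
<programlisting>
$ nix-shell -p erlang rebar3 --run "rebar3 version"
</programlisting>
</para>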
</section>
<section xml:id="how-to-install-erlang-packages">
<title>How to install Erlang packages</title>
<para>
Erlang packages are not registered in the top level simply because
they are not relevant to the vast majority of Nix users. They are
installable using the <literal>erlangPackages</literal> attribute set.
You can list the available packages in the
<literal>erlangPackages</literal> with the following command:
</para>
<programlisting>
$ nix-env -f &quot;&lt;nixpkgs&gt;&quot; -qaP -A erlangPackages
erlangPackages.esqlite esqlite-0.2.1
erlangPackages.goldrush goldrush-0.1.7
erlangPackages.ibrowse ibrowse-4.2.2
erlangPackages.jiffy jiffy-0.14.5
erlangPackages.lager lager-3.0.2
erlangPackages.meck meck-0.8.3
erlangPackages.rebar3-pc pc-1.1.0
</programlisting>
<para>
To install any of those packages into your profile, refer to them by
their attribute path (first column):
</para>
<programlisting>
$ nix-env -f &quot;&lt;nixpkgs&gt;&quot; -iA erlangPackages.ibrowse
</programlisting>
<para>
The attribute path of any Erlang package corresponds to the name
of that particular package in Hex or its OTP Application/Release name.
</para>
</section>
<section xml:id="packaging-erlang-applications">
<title>Packaging Erlang Applications</title>
<section xml:id="rebar3-packages">
<title>Rebar3 Packages</title>
<para>
There is a Nix function called
<literal>buildRebar3</literal>. We use this function to make a
derivation that understands how to build a rebar3 project. For
example, the expression we use to build the <link
xlink:href="https://github.com/erlang-nix/hex2nix">hex2nix</link>
project follows.
</para>
<programlisting>
{stdenv, fetchFromGitHub, buildRebar3, ibrowse, jsx, erlware_commons }:
buildRebar3 rec {
name = "hex2nix";
version = "0.0.1";
src = fetchFromGitHub {
owner = "ericbmerritt";
repo = "hex2nix";
rev = "${version}";
sha256 = "1w7xjidz1l5yjmhlplfx7kphmnpvqm67w99hd2m7kdixwdxq0zqg";
};
erlangDeps = [ ibrowse jsx erlware_commons ];
}
</programlisting>
<para>
The only visible difference between this derivation and
something like <literal>stdenv.mkDerivation</literal> is that we
have added <literal>erlangDeps</literal> to the derivation. If
you add your Erlang dependencies here they will be correctly
handled by the system.
</para>
<para>
If your package needs to compile native code via Rebar's port
compilation mechanism, you should add <literal>compilePort =
true;</literal> to the derivation.
</para>
</section>
<section xml:id="hex-packages">
<title>Hex Packages</title>
<para>
Hex packages are based on Rebar packages. In fact, at the moment
we can only compile Hex packages that are buildable with
Rebar3. Packages that use Mix and other build systems are not
supported. That being said, we know a lot more about Hex and can
do more for you.
</para>
<programlisting>
{ buildHex }:
buildHex {
name = "esqlite";
version = "0.2.1";
sha256 = "1296fn1lz4lz4zqzn4dwc3flgkh0i6n4sydg501faabfbv8d3wkr";
compilePort = true;
}
</programlisting>
<para>
For Hex packages you need to provide the name, the version, and
the SHA-256 digest of the package, and use
<literal>buildHex</literal> to build it. Obviously, the package
needs to have already been published to Hex.
</para>
</section>
</section>
<section xml:id="how-to-develop">
<title>How to develop</title>
<section xml:id="accessing-an-environment">
<title>Accessing an Environment</title>
<para>
Often, all you want to do is be able to access a valid
environment that contains a specific package and its
dependencies. We can do that with the <literal>env</literal>
attribute of a derivation. For example, let's say we want to access an
Erlang REPL with ibrowse loaded up. We could do the following:
</para>
<programlisting>
~/w/nixpkgs nix-shell -A erlangPackages.ibrowse.env --run "erl"
Erlang/OTP 18 [erts-7.0] [source] [64-bit] [smp:4:4] [async-threads:10] [hipe] [kernel-poll:false]
Eshell V7.0 (abort with ^G)
1> m(ibrowse).
Module: ibrowse
MD5: 3b3e0137d0cbb28070146978a3392945
Compiled: January 10 2016, 23:34
Object file: /nix/store/g1rlf65rdgjs4abbyj4grp37ry7ywivj-ibrowse-4.2.2/lib/erlang/lib/ibrowse-4.2.2/ebin/ibrowse.beam
Compiler options: [{outdir,"/tmp/nix-build-ibrowse-4.2.2.drv-0/hex-source-ibrowse-4.2.2/_build/default/lib/ibrowse/ebin"},
debug_info,debug_info,nowarn_shadow_vars,
warn_unused_import,warn_unused_vars,warnings_as_errors,
{i,"/tmp/nix-build-ibrowse-4.2.2.drv-0/hex-source-ibrowse-4.2.2/_build/default/lib/ibrowse/include"}]
Exports:
add_config/1 send_req_direct/7
all_trace_off/0 set_dest/3
code_change/3 set_max_attempts/3
get_config_value/1 set_max_pipeline_size/3
get_config_value/2 set_max_sessions/3
get_metrics/0 show_dest_status/0
get_metrics/2 show_dest_status/1
handle_call/3 show_dest_status/2
handle_cast/2 spawn_link_worker_process/1
handle_info/2 spawn_link_worker_process/2
init/1 spawn_worker_process/1
module_info/0 spawn_worker_process/2
module_info/1 start/0
rescan_config/0 start_link/0
rescan_config/1 stop/0
send_req/3 stop_worker_process/1
send_req/4 stream_close/1
send_req/5 stream_next/1
send_req/6 terminate/2
send_req_direct/4 trace_off/0
send_req_direct/5 trace_off/2
send_req_direct/6 trace_on/0
trace_on/2
ok
2>
</programlisting>
<para>
Notice the <literal>-A erlangPackages.ibrowse.env</literal>. That
is the key to this functionality.
</para>
</section>
<section xml:id="creating-a-shell">
<title>Creating a Shell</title>
<para>
Getting access to an environment often isn't enough to do real
development. Many times we need to create a
<literal>shell.nix</literal> file and do our development inside
of the environment specified by that file. This file looks a lot
like the packaging described above. The main difference is that
<literal>src</literal> points to the project root and we call the
package directly.
</para>
<programlisting>
{ pkgs ? import &lt;nixpkgs&gt; {} }:
with pkgs;
let
f = { buildHex, ibrowse, jsx, erlware_commons }:
buildHex {
name = "hex2nix";
version = "0.1.0";
src = ./.;
erlangDeps = [ ibrowse jsx erlware_commons ];
};
drv = erlangPackages.callPackage f {};
in
drv
</programlisting>
<section xml:id="building-in-a-shell">
<title>Building in a shell</title>
<para>
Unfortunately for us users of Nix, Rebar isn't very cooperative
with us from the standpoint of building a hermetic
environment. When building the rebar3 support we had to do some
sneaky things to get it not to go out and pull packages on its
own. Also unfortunately, you have to do some of the same things
when building a project inside of a Nix shell.
<orderedlist numeration="arabic">
<listitem>
<para>Run <literal>rebar3-nix-bootstrap</literal> every time
dependencies change</para>
</listitem>
<listitem>
<para>Set <literal>HOME</literal> to the current directory.</para>
</listitem>
</orderedlist>
If you do these two things, then Rebar will be happy with you. I
codify these into a Makefile. Fortunately, rebar3-nix-bootstrap
is idempotent and fairly quick, so you can run it as often as
you like.
</para>
<programlisting>
# =============================================================================
# Rules
# =============================================================================
.PHONY: all test clean repl shell build analyze bootstrap
all: test
clean:
rm -rf _build
rm -rf .cache
repl:
nix-shell --run "erl"
shell:
nix-shell --run "bash"
bootstrap:
nix-shell --pure --run "rebar3-nix-bootstrap"
build: bootstrap
nix-shell --pure --run "HOME=$(CURDIR) rebar3 compile"
analyze: bootstrap
nix-shell --pure --run "HOME=$(CURDIR) rebar3 do compile,dialyzer"
test: bootstrap
nix-shell --pure --run "HOME=$(CURDIR) rebar3 do compile,dialyzer,eunit"
</programlisting>
<para>
If you add the <literal>shell.nix</literal> as described and
use rebar via the Makefile above, things should simply work.
</para>
</section>
</section>
</section>
<section xml:id="generating-packages-from-hex-with-hex2nix">
<title>Generating Packages from Hex with Hex2Nix</title>
<para>
Updating the Hex packages requires the use of the
<literal>hex2nix</literal> tool. Given the path to the Erlang
modules (usually
<literal>pkgs/development/erlang-modules</literal>), it will
happily dump a file called
<literal>hex-packages.nix</literal>. That file will contain all
the packages that use a recognized build system in Hex. However,
it can't know whether or not all those packages are buildable.
</para>
<para>
To make life easier for our users, it makes good sense to go
ahead and attempt to build all those packages and remove the
ones that don't build. To do that, simply run the following command in
the root of your <literal>nixpkgs</literal> repository:
</para>
<programlisting>
$ nix-build -A erlangPackages
</programlisting>
<para>
That will build every package in
<literal>erlangPackages</literal>. Then you can go through and
manually remove the ones that fail. Hopefully, someone will
improve <literal>hex2nix</literal> in the future to automate
that.
</para>
</section>
</chapter>


@@ -1,18 +1,629 @@
<chapter xmlns="http://docbook.org/ns/docbook"
xmlns:xlink="http://www.w3.org/1999/xlink"
xmlns:xi="http://www.w3.org/2001/XInclude"
xml:id="chap-functions">
<title>Functions reference</title>
<title>Functions reference</title>
<para>
The nixpkgs repository has several utility functions to manipulate Nix expressions.
</para>
<section xml:id="sec-pkgs-overridePackages">
<title>pkgs.overridePackages</title>
<para>
This function inside the nixpkgs expression (<varname>pkgs</varname>)
can be used to override the set of packages itself.
</para>
<para>
Warning: this function is expensive and must not be used from within
the nixpkgs repository.
</para>
<para>
Example usage:
<programlisting>let
pkgs = import &lt;nixpkgs&gt; {};
newpkgs = pkgs.overridePackages (self: super: {
foo = super.foo.override { ... };
});
in ...</programlisting>
</para>
<para>
The resulting <varname>newpkgs</varname> will have the new <varname>foo</varname>
expression, and all other expressions depending on <varname>foo</varname> will also
use the new <varname>foo</varname> expression.
</para>
<para>
The behavior of this function is similar to <link
linkend="sec-modify-via-packageOverrides">config.packageOverrides</link>.
</para>
<para>
The <varname>self</varname> parameter refers to the final package set with the
applied overrides. Using this parameter may lead to infinite recursion if not
used consciously.
</para>
<para>
The <varname>super</varname> parameter refers to the old package set.
It's equivalent to <varname>pkgs</varname> in the above example.
</para>
</section>
<section xml:id="sec-pkg-override">
<title>&lt;pkg&gt;.override</title>
<para>
The function <varname>override</varname> is usually available for all the
derivations in the nixpkgs expression (<varname>pkgs</varname>).
</para>
<para>
It is used to override the arguments passed to a function.
</para>
<para>
Example usages:
<programlisting>pkgs.foo.override { arg1 = val1; arg2 = val2; ... }</programlisting>
<programlisting>pkgs.overridePackages (self: super: {
foo = super.foo.override { barSupport = true ; };
})</programlisting>
<programlisting>mypkg = pkgs.callPackage ./mypkg.nix {
mydep = pkgs.mydep.override { ... };
}</programlisting>
</para>
<para>
In the first example, <varname>pkgs.foo</varname> is the result of a function call
with some default arguments, usually a derivation.
Using <varname>pkgs.foo.override</varname> will call the same function with
the given new arguments.
</para>
</section>
<section xml:id="sec-pkg-overrideDerivation">
<title>&lt;pkg&gt;.overrideDerivation</title>
<warning>
<para>Do not use this function in Nixpkgs. Because it breaks
package abstraction and doesn't provide error checking for
function arguments, it is only intended for ad-hoc customisation
(such as in <filename>~/.nixpkgs/config.nix</filename>).</para>
</warning>
<para>
The function <varname>overrideDerivation</varname> is usually available for all the
derivations in the nixpkgs expression (<varname>pkgs</varname>).
</para>
<para>
It is used to create a new derivation by overriding the attributes of
the original derivation according to the given function.
</para>
<para>
Example usage:
<programlisting>mySed = pkgs.gnused.overrideDerivation (oldAttrs: {
name = "sed-4.2.2-pre";
src = fetchurl {
url = ftp://alpha.gnu.org/gnu/sed/sed-4.2.2-pre.tar.bz2;
sha256 = "11nq06d131y4wmf3drm0yk502d2xc6n5qy82cg88rb9nqd2lj41k";
};
patches = [];
});</programlisting>
</para>
<para>
In the above example, the name, src and patches of the derivation
will be overridden, while all other attributes will be retained from the
original derivation.
</para>
<para>
The argument <varname>oldAttrs</varname> is used to refer to the attribute set of
the original derivation.
</para>
</section>
<section xml:id="sec-lib-makeOverridable">
<title>lib.makeOverridable</title>
<para>
The function <varname>lib.makeOverridable</varname> is used to make the result
of a function easily customizable. This utility only makes sense for functions
that accept an argument set and return an attribute set.
</para>
<para>
Example usage:
<programlisting>f = { a, b }: { result = a+b; }
c = lib.makeOverridable f { a = 1; b = 2; }</programlisting>
</para>
<para>
The variable <varname>c</varname> is the value of the <varname>f</varname> function
applied with some default arguments. Hence the value of <varname>c.result</varname>
is <literal>3</literal>, in this example.
</para>
<para>
The variable <varname>c</varname> however also has some additional functions, like
<link linkend="sec-pkg-override">c.override</link> which can be used to
override the default arguments. In this example the value of
<varname>(c.override { a = 4; }).result</varname> is 6.
</para>
</section>
<section xml:id="sec-fhs-environments">
<title>buildFHSChrootEnv/buildFHSUserEnv</title>
<para>
<function>buildFHSChrootEnv</function> and
<function>buildFHSUserEnv</function> provide a way to build and run
FHS-compatible lightweight sandboxes. They get their own isolated root with
bound <filename>/nix/store</filename>, so their footprint in terms of disk
space needed is quite small. This allows one to run software which is hard or
unfeasible to patch for NixOS -- 3rd-party source trees with FHS assumptions,
games distributed as tarballs, software with integrity checking and/or external
self-updated binaries.
</para>
<para>
<function>buildFHSChrootEnv</function> allows one to create persistent
environments, which can be constructed, deconstructed and entered by
multiple users at once. A downside is that it requires
<literal>root</literal> access for both those who create and destroy and
those who enter it. It can be useful to create environments for daemons that
one can enter and observe.
</para>
<para>
<function>buildFHSUserEnv</function> uses the Linux namespaces feature to create
temporary lightweight environments which are destroyed after all child
processes exit. It does not require root access, and can be useful to create
sandboxes and wrap applications.
</para>
<para>
Those functions both rely on <function>buildFHSEnv</function>, which creates
an actual directory structure given a list of necessary packages and extra
build commands.
<function>buildFHSChrootEnv</function> and <function>buildFHSUserEnv</function>
both accept the following arguments, which are passed on to
<function>buildFHSEnv</function>:
</para>
<variablelist>
<varlistentry>
<term><literal>name</literal></term>
<listitem><para>Environment name.</para></listitem>
</varlistentry>
<varlistentry>
<term><literal>targetPkgs</literal></term>
<listitem><para>Packages to be installed for the main host's architecture
(i.e. x86_64 on x86_64 installations).</para></listitem>
</varlistentry>
<varlistentry>
<term><literal>multiPkgs</literal></term>
<listitem><para>Packages to be installed for all architectures supported by
a host (i.e. i686 and x86_64 on x86_64 installations).</para></listitem>
</varlistentry>
<varlistentry>
<term><literal>extraBuildCommands</literal></term>
<listitem><para>Additional commands to be executed for finalizing the
directory structure.</para></listitem>
</varlistentry>
<varlistentry>
<term><literal>extraBuildCommandsMulti</literal></term>
<listitem><para>Like <literal>extraBuildCommands</literal>, but
executed only on multilib architectures.</para></listitem>
</varlistentry>
</variablelist>
<para>
Additionally, <function>buildFHSUserEnv</function> accepts a
<literal>runScript</literal> parameter, which is a command that will be
executed inside the sandbox and passed all the command line arguments. It
defaults to <literal>bash</literal>.
</para>
<para>
It also uses the <literal>CHROOTENV_EXTRA_BINDS</literal> environment variable
for binding extra directories in the sandbox to outside places. The format of
the variable is <literal>/mnt=test-mnt:/data</literal>, where
<literal>/mnt</literal> would be mounted as <literal>/test-mnt</literal>
and <literal>/data</literal> would be mounted as <literal>/data</literal>.
The <literal>extraBindMounts</literal> array argument to the
<function>buildFHSUserEnv</function> function is prepended to this variable.
Latter entries take priority if defined several times -- i.e. in case of
<literal>/data=data1:/data=data2</literal> the actual bind path would be
<literal>/data2</literal>.
</para>
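<para>
A minimal sketch of requesting extra bind mounts from the environment definition (assuming each <literal>extraBindMounts</literal> element uses the same format as the variable; the directory names are arbitrary):
<programlisting>(pkgs.buildFHSUserEnv {
  name = "env-with-binds";
  targetPkgs = pkgs: [ pkgs.coreutils ];
  # bind outside /mnt as /test-mnt and /data as /data inside the sandbox
  extraBindMounts = [ "/mnt=test-mnt" "/data" ];
  runScript = "bash";
}).env</programlisting>
Because the array is prepended to <literal>CHROOTENV_EXTRA_BINDS</literal>, entries supplied through the variable at run time come later and therefore win when the same path is bound twice.
</para>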
<para>
One can create a simple environment using a <literal>shell.nix</literal>
like this:
</para>
<programlisting><![CDATA[
{ pkgs ? import <nixpkgs> {} }:
(pkgs.buildFHSUserEnv {
name = "simple-x11-env";
targetPkgs = pkgs: (with pkgs;
[ udev
alsaLib
]) ++ (with pkgs.xorg;
[ libX11
libXcursor
libXrandr
]);
multiPkgs = pkgs: (with pkgs;
[ udev
alsaLib
]);
runScript = "bash";
}).env
]]></programlisting>
<para>
Running <literal>nix-shell</literal> would then drop you into a shell with
these libraries and binaries available. You can use this to run
closed-source applications which expect FHS structure without hassles:
simply change <literal>runScript</literal> to the application path,
e.g. <filename>./bin/start.sh</filename> -- relative paths are supported.
</para>
</section>
<section xml:id="sec-pkgs-dockerTools">
<title>pkgs.dockerTools</title>
<para>
<varname>pkgs.dockerTools</varname> is a set of functions for creating and
manipulating Docker images according to the
<link xlink:href="https://github.com/docker/docker/blob/master/image/spec/v1.md#docker-image-specification-v100">
Docker Image Specification v1.0.0
</link>. Docker itself is not used to perform any of the operations done by these
functions.
</para>
<xi:include href="functions/library.xml" />
<xi:include href="functions/overrides.xml" />
<xi:include href="functions/generators.xml" />
<xi:include href="functions/debug.xml" />
<xi:include href="functions/fhs-environments.xml" />
<xi:include href="functions/shell.xml" />
<xi:include href="functions/dockertools.xml" />
<warning>
<para>
The <varname>dockerTools</varname> API is unstable and may be subject to
backwards-incompatible changes in the future.
</para>
</warning>
<section xml:id="ssec-pkgs-dockerTools-buildImage">
<title>buildImage</title>
<para>
This function is analogous to the <command>docker build</command> command,
in that it can be used to build a Docker-compatible repository tarball containing
a single image with one or multiple layers. As such, the result
is suitable for being loaded in Docker with <command>docker load</command>.
</para>
<para>
The parameters of <varname>buildImage</varname> with relative example values are
described below:
</para>
<example xml:id='ex-dockerTools-buildImage'><title>Docker build</title>
<programlisting>
buildImage {
name = "redis"; <co xml:id='ex-dockerTools-buildImage-1' />
tag = "latest"; <co xml:id='ex-dockerTools-buildImage-2' />
fromImage = someBaseImage; <co xml:id='ex-dockerTools-buildImage-3' />
fromImageName = null; <co xml:id='ex-dockerTools-buildImage-4' />
fromImageTag = "latest"; <co xml:id='ex-dockerTools-buildImage-5' />
contents = pkgs.redis; <co xml:id='ex-dockerTools-buildImage-6' />
runAsRoot = '' <co xml:id='ex-dockerTools-buildImage-runAsRoot' />
#!${stdenv.shell}
mkdir -p /data
'';
config = { <co xml:id='ex-dockerTools-buildImage-8' />
Cmd = [ "/bin/redis-server" ];
WorkingDir = "/data";
Volumes = {
"/data" = {};
};
};
}
</programlisting>
</example>
<para>The above example will build a Docker image <literal>redis/latest</literal>
from the given base image. Loading and running this image in Docker results in
<literal>redis-server</literal> being started automatically.
</para>
<calloutlist>
<callout arearefs='ex-dockerTools-buildImage-1'>
<para>
<varname>name</varname> specifies the name of the resulting image.
This is the only required argument for <varname>buildImage</varname>.
</para>
</callout>
<callout arearefs='ex-dockerTools-buildImage-2'>
<para>
<varname>tag</varname> specifies the tag of the resulting image.
By default it's <literal>latest</literal>.
</para>
</callout>
<callout arearefs='ex-dockerTools-buildImage-3'>
<para>
<varname>fromImage</varname> is the repository tarball containing the base image.
It must be a valid Docker image, such as exported by <command>docker save</command>.
By default it's <literal>null</literal>, which can be seen as equivalent
to <literal>FROM scratch</literal> of a <filename>Dockerfile</filename>.
</para>
</callout>
<callout arearefs='ex-dockerTools-buildImage-4'>
<para>
<varname>fromImageName</varname> can be used to further specify
the base image within the repository, in case it contains multiple images.
By default it's <literal>null</literal>, in which case
<varname>buildImage</varname> will pick the first image available
in the repository.
</para>
</callout>
<callout arearefs='ex-dockerTools-buildImage-5'>
<para>
<varname>fromImageTag</varname> can be used to further specify the tag
of the base image within the repository, in case an image contains multiple tags.
By default it's <literal>null</literal>, in which case
<varname>buildImage</varname> will pick the first tag available for the base image.
</para>
</callout>
<callout arearefs='ex-dockerTools-buildImage-6'>
<para>
<varname>contents</varname> is a derivation that will be copied in the new
layer of the resulting image. This can be similarly seen as
<command>ADD contents/ /</command> in a <filename>Dockerfile</filename>.
By default it's <literal>null</literal>.
</para>
</callout>
<callout arearefs='ex-dockerTools-buildImage-runAsRoot'>
<para>
<varname>runAsRoot</varname> is a bash script that will run as root
in an environment that overlays the existing layers of the base image with
the new resulting layer, including the previously copied
<varname>contents</varname> derivation.
This can be similarly seen as
<command>RUN ...</command> in a <filename>Dockerfile</filename>.
<note>
<para>
Using this parameter requires the <literal>kvm</literal>
device to be available.
</para>
</note>
</para>
</callout>
<callout arearefs='ex-dockerTools-buildImage-8'>
<para>
<varname>config</varname> is used to specify the configuration of the
containers that will be started off the built image in Docker.
The available options are listed in the
<link xlink:href="https://github.com/docker/docker/blob/master/image/spec/v1.md#container-runconfig-field-descriptions">
Docker Image Specification v1.0.0
</link>.
</para>
</callout>
</calloutlist>
<para>
After the new layer has been created, its closure
(to which <varname>contents</varname>, <varname>config</varname> and
<varname>runAsRoot</varname> contribute) will be copied in the layer itself.
Only new dependencies that are not already in the existing layers will be copied.
</para>
<para>
At the end of the process, only one new single layer will be produced and
added to the resulting image.
</para>
<para>
The resulting repository will only list the single image
<varname>image/tag</varname>. In the case of <xref linkend='ex-dockerTools-buildImage'/>
it would be <varname>redis/latest</varname>.
</para>
<para>
It is possible to inspect the arguments with which an image was built
using its <varname>buildArgs</varname> attribute.
</para>
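<para>
For instance (a minimal sketch reusing the image from <xref linkend='ex-dockerTools-buildImage'/>), the build arguments can be read back from the resulting derivation:
<programlisting>let
  redisImage = pkgs.dockerTools.buildImage {
    name = "redis";
    tag = "latest";
    contents = pkgs.redis;
  };
in redisImage.buildArgs.name  # evaluates to "redis"</programlisting>
</para>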
</section>
<section xml:id="ssec-pkgs-dockerTools-fetchFromRegistry">
<title>pullImage</title>
<para>
This function is analogous to the <command>docker pull</command> command,
in that it can be used to fetch a Docker image from a Docker registry.
Currently only registry <literal>v1</literal> is supported.
By default <link xlink:href="https://hub.docker.com/">Docker Hub</link>
is used to pull images.
</para>
<para>
Its parameters are described in the example below:
</para>
<example xml:id='ex-dockerTools-pullImage'><title>Docker pull</title>
<programlisting>
pullImage {
imageName = "debian"; <co xml:id='ex-dockerTools-pullImage-1' />
imageTag = "jessie"; <co xml:id='ex-dockerTools-pullImage-2' />
imageId = null; <co xml:id='ex-dockerTools-pullImage-3' />
sha256 = "1bhw5hkz6chrnrih0ymjbmn69hyfriza2lr550xyvpdrnbzr4gk2"; <co xml:id='ex-dockerTools-pullImage-4' />
indexUrl = "https://index.docker.io"; <co xml:id='ex-dockerTools-pullImage-5' />
registryVersion = "v1";
}
</programlisting>
</example>
<calloutlist>
<callout arearefs='ex-dockerTools-pullImage-1'>
<para>
<varname>imageName</varname> specifies the name of the image to be downloaded,
which can also include the registry namespace (e.g. <literal>library/debian</literal>).
This argument is required.
</para>
</callout>
<callout arearefs='ex-dockerTools-pullImage-2'>
<para>
<varname>imageTag</varname> specifies the tag of the image to be downloaded.
By default it's <literal>latest</literal>.
</para>
</callout>
<callout arearefs='ex-dockerTools-pullImage-3'>
<para>
<varname>imageId</varname>, if specified, is the exact image to be fetched, instead
of <varname>imageName/imageTag</varname>. However, the resulting repository
will still be named <varname>imageName/imageTag</varname>.
By default it's <literal>null</literal>.
</para>
</callout>
<callout arearefs='ex-dockerTools-pullImage-4'>
<para>
<varname>sha256</varname> is the checksum of the whole fetched image.
This argument is required.
</para>
<note>
<para>The checksum is computed on the unpacked directory, not on the final tarball.</para>
</note>
</callout>
<callout arearefs='ex-dockerTools-pullImage-5'>
<para>
In the above example the default values are shown for the variables
<varname>indexUrl</varname> and <varname>registryVersion</varname>.
Hence by default the Docker.io registry is used to pull the images.
</para>
</callout>
</calloutlist>
</section>
<section xml:id="ssec-pkgs-dockerTools-exportImage">
<title>exportImage</title>
<para>
This function is analogous to the <command>docker export</command> command,
in that it can be used to flatten a Docker image that contains multiple layers.
It is in fact the result of the merge of all the layers of the image.
As such, the result is suitable for being imported in Docker
with <command>docker import</command>.
</para>
<note>
<para>
Using this function requires the <literal>kvm</literal>
device to be available.
</para>
</note>
<para>
The parameters of <varname>exportImage</varname> are the following:
</para>
<example xml:id='ex-dockerTools-exportImage'><title>Docker export</title>
<programlisting>
exportImage {
fromImage = someLayeredImage;
fromImageName = null;
fromImageTag = null;
name = someLayeredImage.name;
}
</programlisting>
</example>
<para>
The parameters relative to the base image have the same synopsis as
described in <xref linkend='ssec-pkgs-dockerTools-buildImage'/>, except that
<varname>fromImage</varname> is the only required argument in this case.
</para>
<para>
The <varname>name</varname> argument is the name of the derivation output,
which defaults to <varname>fromImage.name</varname>.
</para>
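<para>
A typical way to use the result (a sketch; the file name <filename>flat-image.nix</filename> and the image name are arbitrary) is to build it with <command>nix-build</command> and feed the produced tarball to <command>docker import</command>:
<screen>$ nix-build flat-image.nix
$ docker import result my-flat-image:latest</screen>
</para>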
</section>
<section xml:id="ssec-pkgs-dockerTools-shadowSetup">
<title>shadowSetup</title>
<para>
This constant string is a helper for setting up the base files for managing
users and groups, only if such files don't exist already.
It is suitable for being used in a
<varname>runAsRoot</varname> <xref linkend='ex-dockerTools-buildImage-runAsRoot'/> script for cases like
in the example below:
</para>
<example xml:id='ex-dockerTools-shadowSetup'><title>Shadow base files</title>
<programlisting>
buildImage {
name = "shadow-basic";
runAsRoot = ''
#!${stdenv.shell}
${shadowSetup}
groupadd -r redis
useradd -r -g redis redis
mkdir /data
chown redis:redis /data
'';
}
</programlisting>
</example>
<para>
Creating base files like <literal>/etc/passwd</literal> or
<literal>/etc/login.defs</literal> is necessary for shadow-utils to
manipulate users and groups.
</para>
</section>
</section>
</chapter>

View File

@@ -1,21 +0,0 @@
<section xmlns="http://docbook.org/ns/docbook"
xmlns:xlink="http://www.w3.org/1999/xlink"
xmlns:xi="http://www.w3.org/2001/XInclude"
xml:id="sec-debug">
<title>Debugging Nix Expressions</title>
<para>
Nix is a unityped, dynamic language; this means every value can potentially
appear anywhere. Since it is also non-strict, evaluation order and what
ultimately is evaluated might surprise you. Therefore it is important to be
able to debug Nix expressions.
</para>
<para>
In the <literal>lib/debug.nix</literal> file you will find a number of
functions that help (pretty-)printing values while evaluation is running. You
can even specify how deep these values should be printed recursively, and
transform them on the fly. Please consult the docstrings in
<literal>lib/debug.nix</literal> for usage information.
</para>
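<para>
A minimal sketch, assuming the <varname>traceSeq</varname> helper exported by the library (see <literal>lib/debug.nix</literal> for the full set of helpers):
<programlisting>let
  lib = import &lt;nixpkgs/lib&gt;;
  value = { a = { b = [ 1 2 3 ]; }; };
in
  # force the whole value, print it on stderr, then return "done"
  lib.traceSeq value "done"</programlisting>
</para>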
</section>

View File

@@ -1,564 +0,0 @@
<section xmlns="http://docbook.org/ns/docbook"
xmlns:xlink="http://www.w3.org/1999/xlink"
xmlns:xi="http://www.w3.org/2001/XInclude"
xml:id="sec-pkgs-dockerTools">
<title>pkgs.dockerTools</title>
<para>
<varname>pkgs.dockerTools</varname> is a set of functions for creating and
manipulating Docker images according to the
<link xlink:href="https://github.com/moby/moby/blob/master/image/spec/v1.2.md#docker-image-specification-v120">
Docker Image Specification v1.2.0 </link>. Docker itself is not used to
perform any of the operations done by these functions.
</para>
<warning>
<para>
The <varname>dockerTools</varname> API is unstable and may be subject to
backwards-incompatible changes in the future.
</para>
</warning>
<section xml:id="ssec-pkgs-dockerTools-buildImage">
<title>buildImage</title>
<para>
This function is analogous to the <command>docker build</command> command,
in that it can be used to build a Docker-compatible repository tarball containing
a single image with one or multiple layers. As such, the result is suitable
for being loaded in Docker with <command>docker load</command>.
</para>
<para>
The parameters of <varname>buildImage</varname> with relative example values
are described below:
</para>
<example xml:id='ex-dockerTools-buildImage'>
<title>Docker build</title>
<programlisting>
buildImage {
name = "redis"; <co xml:id='ex-dockerTools-buildImage-1' />
tag = "latest"; <co xml:id='ex-dockerTools-buildImage-2' />
fromImage = someBaseImage; <co xml:id='ex-dockerTools-buildImage-3' />
fromImageName = null; <co xml:id='ex-dockerTools-buildImage-4' />
fromImageTag = "latest"; <co xml:id='ex-dockerTools-buildImage-5' />
contents = pkgs.redis; <co xml:id='ex-dockerTools-buildImage-6' />
runAsRoot = '' <co xml:id='ex-dockerTools-buildImage-runAsRoot' />
#!${stdenv.shell}
mkdir -p /data
'';
config = { <co xml:id='ex-dockerTools-buildImage-8' />
Cmd = [ "/bin/redis-server" ];
WorkingDir = "/data";
Volumes = {
"/data" = {};
};
};
}
</programlisting>
</example>
<para>
The above example will build a Docker image <literal>redis/latest</literal>
from the given base image. Loading and running this image in Docker results
in <literal>redis-server</literal> being started automatically.
</para>
<calloutlist>
<callout arearefs='ex-dockerTools-buildImage-1'>
<para>
<varname>name</varname> specifies the name of the resulting image. This is
the only required argument for <varname>buildImage</varname>.
</para>
</callout>
<callout arearefs='ex-dockerTools-buildImage-2'>
<para>
<varname>tag</varname> specifies the tag of the resulting image. By
default it's <literal>null</literal>, which indicates that the nix output
hash will be used as tag.
</para>
</callout>
<callout arearefs='ex-dockerTools-buildImage-3'>
<para>
<varname>fromImage</varname> is the repository tarball containing the base
image. It must be a valid Docker image, such as exported by
<command>docker save</command>. By default it's <literal>null</literal>,
which can be seen as equivalent to <literal>FROM scratch</literal> of a
<filename>Dockerfile</filename>.
</para>
</callout>
<callout arearefs='ex-dockerTools-buildImage-4'>
<para>
<varname>fromImageName</varname> can be used to further specify the base
image within the repository, in case it contains multiple images. By
default it's <literal>null</literal>, in which case
<varname>buildImage</varname> will pick the first image available in the
repository.
</para>
</callout>
<callout arearefs='ex-dockerTools-buildImage-5'>
<para>
<varname>fromImageTag</varname> can be used to further specify the tag of
the base image within the repository, in case an image contains multiple
tags. By default it's <literal>null</literal>, in which case
<varname>buildImage</varname> will pick the first tag available for the
base image.
</para>
</callout>
<callout arearefs='ex-dockerTools-buildImage-6'>
<para>
<varname>contents</varname> is a derivation that will be copied in the new
layer of the resulting image. This can be similarly seen as <command>ADD
contents/ /</command> in a <filename>Dockerfile</filename>. By default
it's <literal>null</literal>.
</para>
</callout>
<callout arearefs='ex-dockerTools-buildImage-runAsRoot'>
<para>
<varname>runAsRoot</varname> is a bash script that will run as root in an
environment that overlays the existing layers of the base image with the
new resulting layer, including the previously copied
<varname>contents</varname> derivation. This can be similarly seen as
<command>RUN ...</command> in a <filename>Dockerfile</filename>.
<note>
<para>
Using this parameter requires the <literal>kvm</literal> device to be
available.
</para>
</note>
</para>
</callout>
<callout arearefs='ex-dockerTools-buildImage-8'>
<para>
<varname>config</varname> is used to specify the configuration of the
containers that will be started off the built image in Docker. The
available options are listed in the
<link xlink:href="https://github.com/moby/moby/blob/master/image/spec/v1.2.md#image-json-field-descriptions">
Docker Image Specification v1.2.0 </link>.
</para>
</callout>
</calloutlist>
<para>
After the new layer has been created, its closure (to which
<varname>contents</varname>, <varname>config</varname> and
<varname>runAsRoot</varname> contribute) will be copied in the layer itself.
Only new dependencies that are not already in the existing layers will be
copied.
</para>
<para>
At the end of the process, only one new single layer will be produced and
added to the resulting image.
</para>
<para>
The resulting repository will only list the single image
<varname>image/tag</varname>. In the case of
<xref linkend='ex-dockerTools-buildImage'/> it would be
<varname>redis/latest</varname>.
</para>
<para>
It is possible to inspect the arguments with which an image was built using
its <varname>buildArgs</varname> attribute.
</para>
<note>
<para>
If you see errors similar to <literal>getProtocolByName: does not exist (no
such protocol name: tcp)</literal> you may need to add
<literal>pkgs.iana-etc</literal> to <varname>contents</varname>.
</para>
</note>
<note>
<para>
If you see errors similar to <literal>Error_Protocol ("certificate has
unknown CA",True,UnknownCa)</literal> you may need to add
<literal>pkgs.cacert</literal> to <varname>contents</varname>.
</para>
</note>
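<para>
A hedged sketch of adding both packages (<varname>myApp</varname> is a placeholder for your own derivation; <varname>pkgs.buildEnv</varname> is used here to combine several packages into the single <varname>contents</varname> derivation):
<programlisting>pkgs.dockerTools.buildImage {
  name = "my-app";
  tag = "latest";
  contents = pkgs.buildEnv {
    name = "image-root";
    paths = [ myApp pkgs.iana-etc pkgs.cacert ];
  };
  config.Cmd = [ "${myApp}/bin/my-app" ];
}</programlisting>
</para>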
<example xml:id="example-pkgs-dockerTools-buildImage-creation-date">
<title>Impurely Defining a Docker Layer's Creation Date</title>
<para>
By default <function>buildImage</function> will use a static date of one
second past the UNIX Epoch. This allows <function>buildImage</function> to
produce binary reproducible images. When listing images with
<command>docker image list</command>, the newly created images will be
listed like this:
</para>
<screen><![CDATA[
$ docker image list
REPOSITORY TAG IMAGE ID CREATED SIZE
hello latest 08c791c7846e 48 years ago 25.2MB
]]></screen>
<para>
You can break binary reproducibility but have a sorted, meaningful
<literal>CREATED</literal> column by setting <literal>created</literal> to
<literal>now</literal>.
</para>
<programlisting><![CDATA[
pkgs.dockerTools.buildImage {
name = "hello";
tag = "latest";
created = "now";
contents = pkgs.hello;
config.Cmd = [ "/bin/hello" ];
}
]]></programlisting>
<para>
and now the Docker CLI will display a reasonable date and sort the images
as expected:
<screen><![CDATA[
$ docker image list
REPOSITORY TAG IMAGE ID CREATED SIZE
hello latest de2bf4786de6 About a minute ago 25.2MB
]]></screen>
however, the produced images will not be binary reproducible.
</para>
</example>
</section>
<section xml:id="ssec-pkgs-dockerTools-buildLayeredImage">
<title>buildLayeredImage</title>
<para>
Create a Docker image with many of the store paths being on their own layer
to improve sharing between images.
</para>
<variablelist>
<varlistentry>
<term>
<varname>name</varname>
</term>
<listitem>
<para>
The name of the resulting image.
</para>
</listitem>
</varlistentry>
<varlistentry>
<term>
<varname>tag</varname> <emphasis>optional</emphasis>
</term>
<listitem>
<para>
Tag of the generated image.
</para>
<para>
<emphasis>Default:</emphasis> the output path's hash
</para>
</listitem>
</varlistentry>
<varlistentry>
<term>
<varname>contents</varname> <emphasis>optional</emphasis>
</term>
<listitem>
<para>
Top level paths in the container. Either a single derivation, or a list
of derivations.
</para>
<para>
<emphasis>Default:</emphasis> <literal>[]</literal>
</para>
</listitem>
</varlistentry>
<varlistentry>
<term>
<varname>config</varname> <emphasis>optional</emphasis>
</term>
<listitem>
<para>
Run-time configuration of the container. A full list of the options is
available in the
<link xlink:href="https://github.com/moby/moby/blob/master/image/spec/v1.2.md#image-json-field-descriptions">
Docker Image Specification v1.2.0 </link>.
</para>
<para>
<emphasis>Default:</emphasis> <literal>{}</literal>
</para>
</listitem>
</varlistentry>
<varlistentry>
<term>
<varname>created</varname> <emphasis>optional</emphasis>
</term>
<listitem>
<para>
Date and time the layers were created. Follows the same
<literal>now</literal> exception supported by
<literal>buildImage</literal>.
</para>
<para>
<emphasis>Default:</emphasis> <literal>1970-01-01T00:00:01Z</literal>
</para>
</listitem>
</varlistentry>
<varlistentry>
<term>
<varname>maxLayers</varname> <emphasis>optional</emphasis>
</term>
<listitem>
<para>
Maximum number of layers to create.
</para>
<para>
<emphasis>Default:</emphasis> <literal>24</literal>
</para>
</listitem>
</varlistentry>
</variablelist>
<section xml:id="dockerTools-buildLayeredImage-arg-contents">
<title>Behavior of <varname>contents</varname> in the final image</title>
<para>
Each path directly listed in <varname>contents</varname> will have a
symlink in the root of the image.
</para>
<para>
For example:
<programlisting><![CDATA[
pkgs.dockerTools.buildLayeredImage {
name = "hello";
contents = [ pkgs.hello ];
}
]]></programlisting>
will create symlinks for all the paths in the <literal>hello</literal>
package:
<screen><![CDATA[
/bin/hello -> /nix/store/h1zb1padqbbb7jicsvkmrym3r6snphxg-hello-2.10/bin/hello
/share/info/hello.info -> /nix/store/h1zb1padqbbb7jicsvkmrym3r6snphxg-hello-2.10/share/info/hello.info
/share/locale/bg/LC_MESSAGES/hello.mo -> /nix/store/h1zb1padqbbb7jicsvkmrym3r6snphxg-hello-2.10/share/locale/bg/LC_MESSAGES/hello.mo
]]></screen>
</para>
</section>
<section xml:id="dockerTools-buildLayeredImage-arg-config">
<title>Automatic inclusion of <varname>config</varname> references</title>
<para>
The closure of <varname>config</varname> is automatically included in the
closure of the final image.
</para>
<para>
This allows you to make very simple Docker images with very little code.
This container will start up and run <command>hello</command>:
<programlisting><![CDATA[
pkgs.dockerTools.buildLayeredImage {
name = "hello";
config.Cmd = [ "${pkgs.hello}/bin/hello" ];
}
]]></programlisting>
</para>
</section>
<section xml:id="dockerTools-buildLayeredImage-arg-maxLayers">
<title>Adjusting <varname>maxLayers</varname></title>
<para>
Increasing the <varname>maxLayers</varname> increases the number of layers
which have a chance to be shared between different images.
</para>
<para>
Modern Docker installations support up to 128 layers, however older
versions support as few as 42.
</para>
<para>
If the produced image will not be extended by other Docker builds, it is
safe to set <varname>maxLayers</varname> to <literal>128</literal>. However
it will be impossible to extend the image further.
</para>
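<para>
For example (a sketch only), an image that is not meant to serve as a base for further Docker builds can simply raise the limit:
<programlisting>pkgs.dockerTools.buildLayeredImage {
  name = "hello";
  contents = [ pkgs.hello ];
  maxLayers = 128;
}</programlisting>
</para>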
<para>
The first (<literal>maxLayers-2</literal>) most "popular" paths will have
their own individual layers, then layer #<literal>maxLayers-1</literal>
will contain all the remaining "unpopular" paths, and finally layer
#<literal>maxLayers</literal> will contain the Image configuration.
</para>
<para>
Docker's layers are not inherently ordered; they are content-addressable
and are not explicitly layered until they are composed into an image.
</para>
</section>
</section>
<section xml:id="ssec-pkgs-dockerTools-fetchFromRegistry">
<title>pullImage</title>
<para>
This function is analogous to the <command>docker pull</command> command, in
that it can be used to pull a Docker image from a Docker registry. By default
<link xlink:href="https://hub.docker.com/">Docker Hub</link> is used to pull
images.
</para>
<para>
Its parameters are described in the example below:
</para>
<example xml:id='ex-dockerTools-pullImage'>
<title>Docker pull</title>
<programlisting>
pullImage {
imageName = "nixos/nix"; <co xml:id='ex-dockerTools-pullImage-1' />
imageDigest = "sha256:20d9485b25ecfd89204e843a962c1bd70e9cc6858d65d7f5fadc340246e2116b"; <co xml:id='ex-dockerTools-pullImage-2' />
finalImageTag = "1.11"; <co xml:id='ex-dockerTools-pullImage-3' />
sha256 = "0mqjy3zq2v6rrhizgb9nvhczl87lcfphq9601wcprdika2jz7qh8"; <co xml:id='ex-dockerTools-pullImage-4' />
os = "linux"; <co xml:id='ex-dockerTools-pullImage-5' />
arch = "x86_64"; <co xml:id='ex-dockerTools-pullImage-6' />
}
</programlisting>
</example>
<calloutlist>
<callout arearefs='ex-dockerTools-pullImage-1'>
<para>
<varname>imageName</varname> specifies the name of the image to be
downloaded, which can also include the registry namespace (e.g.
<literal>nixos</literal>). This argument is required.
</para>
</callout>
<callout arearefs='ex-dockerTools-pullImage-2'>
<para>
<varname>imageDigest</varname> specifies the digest of the image to be
downloaded. Skopeo can be used to get the digest of an image, with its
<varname>inspect</varname> subcommand. Since a given
<varname>imageName</varname> may transparently refer to a manifest list of
images which support multiple architectures and/or operating systems,
supply the <literal>--override-os</literal> and <literal>--override-arch</literal> arguments to specify
exactly which image you want. By default it will match the OS and
architecture of the host the command is run on.
<programlisting>
$ nix-shell --packages skopeo jq --command "skopeo --override-os linux --override-arch x86_64 inspect docker://docker.io/nixos/nix:1.11 | jq -r '.Digest'"
sha256:20d9485b25ecfd89204e843a962c1bd70e9cc6858d65d7f5fadc340246e2116b
</programlisting>
This argument is required.
</para>
</callout>
<callout arearefs='ex-dockerTools-pullImage-3'>
<para>
<varname>finalImageTag</varname>, if specified, is the tag of the
image to be created. Note it is never used to fetch the image since we
prefer to rely on the immutable digest ID. By default it's
<literal>latest</literal>.
</para>
</callout>
<callout arearefs='ex-dockerTools-pullImage-4'>
<para>
<varname>sha256</varname> is the checksum of the whole fetched image. This
argument is required.
</para>
</callout>
<callout arearefs='ex-dockerTools-pullImage-5'>
<para>
<varname>os</varname>, if specified, is the operating system of the
fetched image. By default it's <literal>linux</literal>.
</para>
</callout>
<callout arearefs='ex-dockerTools-pullImage-6'>
<para>
<varname>arch</varname>, if specified, is the cpu architecture of the
fetched image. By default it's <literal>x86_64</literal>.
</para>
</callout>
</calloutlist>
</section>
<section xml:id="ssec-pkgs-dockerTools-exportImage">
<title>exportImage</title>
<para>
This function is analogous to the <command>docker export</command> command,
in that it can be used to flatten a Docker image that contains multiple layers. It
is in fact the result of the merge of all the layers of the image. As such,
the result is suitable for being imported in Docker with <command>docker
import</command>.
</para>
<note>
<para>
Using this function requires the <literal>kvm</literal> device to be
available.
</para>
</note>
<para>
The parameters of <varname>exportImage</varname> are the following:
</para>
<example xml:id='ex-dockerTools-exportImage'>
<title>Docker export</title>
<programlisting>
exportImage {
fromImage = someLayeredImage;
fromImageName = null;
fromImageTag = null;
name = someLayeredImage.name;
}
</programlisting>
</example>
<para>
The parameters relative to the base image have the same synopsis as
described in <xref linkend='ssec-pkgs-dockerTools-buildImage'/>, except that
<varname>fromImage</varname> is the only required argument in this case.
</para>
<para>
The <varname>name</varname> argument is the name of the derivation output,
which defaults to <varname>fromImage.name</varname>.
</para>
</section>
<section xml:id="ssec-pkgs-dockerTools-shadowSetup">
<title>shadowSetup</title>
<para>
This constant string is a helper for setting up the base files for managing
users and groups, only if such files don't exist already. It is suitable for
being used in a <varname>runAsRoot</varname>
<xref linkend='ex-dockerTools-buildImage-runAsRoot'/> script for cases like
in the example below:
</para>
<example xml:id='ex-dockerTools-shadowSetup'>
<title>Shadow base files</title>
<programlisting>
buildImage {
name = "shadow-basic";
runAsRoot = ''
#!${stdenv.shell}
${shadowSetup}
groupadd -r redis
useradd -r -g redis redis
mkdir /data
chown redis:redis /data
'';
}
</programlisting>
</example>
<para>
Creating base files like <literal>/etc/passwd</literal> or
<literal>/etc/login.defs</literal> is necessary for shadow-utils to
manipulate users and groups.
</para>
</section>
</section>

View File

@@ -1,142 +0,0 @@
<section xmlns="http://docbook.org/ns/docbook"
xmlns:xlink="http://www.w3.org/1999/xlink"
xmlns:xi="http://www.w3.org/2001/XInclude"
xml:id="sec-fhs-environments">
<title>buildFHSUserEnv</title>
<para>
<function>buildFHSUserEnv</function> provides a way to build and run
FHS-compatible lightweight sandboxes. It creates an isolated root with bound
<filename>/nix/store</filename>, so its footprint in terms of disk space
needed is quite small. This allows one to run software which is hard or
unfeasible to patch for NixOS -- 3rd-party source trees with FHS assumptions,
games distributed as tarballs, software with integrity checking and/or
external self-updated binaries. It uses Linux namespaces feature to create
temporary lightweight environments which are destroyed after all child
processes exit, without root user rights requirement. Accepted arguments are:
</para>
<variablelist>
<varlistentry>
<term>
<literal>name</literal>
</term>
<listitem>
<para>
Environment name.
</para>
</listitem>
</varlistentry>
<varlistentry>
<term>
<literal>targetPkgs</literal>
</term>
<listitem>
<para>
Packages to be installed for the main host's architecture (i.e. x86_64 on
x86_64 installations). Along with libraries, binaries are also installed.
</para>
</listitem>
</varlistentry>
<varlistentry>
<term>
<literal>multiPkgs</literal>
</term>
<listitem>
<para>
Packages to be installed for all architectures supported by a host (i.e.
i686 and x86_64 on x86_64 installations). Only libraries are installed by
default.
</para>
</listitem>
</varlistentry>
<varlistentry>
<term>
<literal>extraBuildCommands</literal>
</term>
<listitem>
<para>
Additional commands to be executed for finalizing the directory structure.
</para>
</listitem>
</varlistentry>
<varlistentry>
<term>
<literal>extraBuildCommandsMulti</literal>
</term>
<listitem>
<para>
Like <literal>extraBuildCommands</literal>, but executed only on multilib
architectures.
</para>
</listitem>
</varlistentry>
<varlistentry>
<term>
<literal>extraOutputsToInstall</literal>
</term>
<listitem>
<para>
Additional derivation outputs to be linked for both target and
multi-architecture packages.
</para>
</listitem>
</varlistentry>
<varlistentry>
<term>
<literal>extraInstallCommands</literal>
</term>
<listitem>
<para>
Additional commands to be executed for finalizing the derivation with
runner script.
</para>
</listitem>
</varlistentry>
<varlistentry>
<term>
<literal>runScript</literal>
</term>
<listitem>
<para>
A command that would be executed inside the sandbox and passed all the
command line arguments. It defaults to <literal>bash</literal>.
</para>
</listitem>
</varlistentry>
</variablelist>
<para>
One can create a simple environment using a <literal>shell.nix</literal> like
this:
</para>
<programlisting><![CDATA[
{ pkgs ? import <nixpkgs> {} }:
(pkgs.buildFHSUserEnv {
name = "simple-x11-env";
targetPkgs = pkgs: (with pkgs;
[ udev
alsaLib
]) ++ (with pkgs.xorg;
[ libX11
libXcursor
libXrandr
]);
multiPkgs = pkgs: (with pkgs;
[ udev
alsaLib
]);
runScript = "bash";
}).env
]]></programlisting>
<para>
Running <literal>nix-shell</literal> would then drop you into a shell with
these libraries and binaries available. You can use this to run closed-source
applications which expect FHS structure without hassles: simply change
<literal>runScript</literal> to the application path, e.g.
<filename>./bin/start.sh</filename> -- relative paths are supported.
</para>
</section>

View File

@@ -1,89 +0,0 @@
<section xmlns="http://docbook.org/ns/docbook"
xmlns:xlink="http://www.w3.org/1999/xlink"
xmlns:xi="http://www.w3.org/2001/XInclude"
xml:id="sec-generators">
<title>Generators</title>
<para>
Generators are functions that create file formats from nix data structures,
e.g. for configuration files. There are generators available for:
<literal>INI</literal>, <literal>JSON</literal> and <literal>YAML</literal>.
</para>
<para>
All generators follow a similar call interface: <code>generatorName
configFunctions data</code>, where <literal>configFunctions</literal> is an
attrset of user-defined functions that format nested parts of the content.
They each have common defaults, so often they do not need to be set manually.
An example is <code>mkSectionName ? (name: libStr.escape [ "[" "]" ]
name)</code> from the <literal>INI</literal> generator. It receives the name
of a section and sanitizes it. The default <literal>mkSectionName</literal>
escapes <literal>[</literal> and <literal>]</literal> with a backslash.
</para>
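<para>
A quick sketch of that call interface, using the <literal>JSON</literal> generator (whose options set can simply be left empty):
<programlisting>with import &lt;nixpkgs/lib&gt;;
# generatorName configFunctions data
generators.toJSON {} { host = "localhost"; port = 42; }</programlisting>
</para>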
<para>
Generators can be fine-tuned to produce exactly the file format required by
your application/service. One example is an INI-file format which uses
<literal>: </literal> as separator, the strings
<literal>"yes"</literal>/<literal>"no"</literal> as boolean values and
requires all string values to be quoted:
</para>
<programlisting>
with lib;
let
customToINI = generators.toINI {
# specifies how to format a key/value pair
mkKeyValue = generators.mkKeyValueDefault {
# specifies the generated string for a subset of nix values
mkValueString = v:
if v == true then ''"yes"''
else if v == false then ''"no"''
else if isString v then ''"${v}"''
# and delegates all other values to the default generator
else generators.mkValueStringDefault {} v;
} ":";
};
# the INI file can now be given as plain old nix values
in customToINI {
main = {
pushinfo = true;
autopush = false;
host = "localhost";
port = 42;
"str:ange" = "very::strange";
};
mergetool = {
merge = "diff3";
};
}
</programlisting>
<para>
This will produce the following INI file as a Nix string:
</para>
<programlisting>
[main]
autopush:"no"
host:"localhost"
port:42
pushinfo:"yes"
str\:ange:"very::strange"
[mergetool]
merge:"diff3"
</programlisting>
<note>
<para>
Nix store paths can be converted to strings by enclosing a derivation
attribute like so: <code>"${drv}"</code>.
</para>
</note>
<para>
Detailed documentation for each generator can be found in
<literal>lib/generators.nix</literal>.
</para>
</section>

View File

@@ -1,13 +0,0 @@
<section xmlns="http://docbook.org/ns/docbook"
xmlns:xlink="http://www.w3.org/1999/xlink"
xmlns:xi="http://www.w3.org/2001/XInclude"
xml:id="sec-functions-library">
<title>Nixpkgs Library Functions</title>
<para>
Nixpkgs provides a standard library at <varname>pkgs.lib</varname>, or
through <code>import &lt;nixpkgs/lib&gt;</code>.
</para>
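<para>
For example (a small sketch), the library can be used directly from any Nix expression:
<programlisting>let lib = import &lt;nixpkgs/lib&gt;; in
lib.concatStringsSep ", " (lib.attrNames { a = 1; b = 2; })
# evaluates to "a, b"</programlisting>
</para>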
<xi:include href="./library/attrsets.xml" />
</section>

File diff suppressed because it is too large.

View File

@@ -1,193 +0,0 @@
<section xmlns="http://docbook.org/ns/docbook"
xmlns:xlink="http://www.w3.org/1999/xlink"
xmlns:xi="http://www.w3.org/2001/XInclude"
xml:id="sec-overrides">
<title>Overriding</title>
<para>
Sometimes one wants to override parts of <literal>nixpkgs</literal>, e.g.
derivation attributes, the results of derivations or even the whole package
set.
</para>
<section xml:id="sec-pkg-override">
<title>&lt;pkg&gt;.override</title>
<para>
The function <varname>override</varname> is usually available for all the
derivations in the nixpkgs expression (<varname>pkgs</varname>).
</para>
<para>
It is used to override the arguments passed to a function.
</para>
<para>
Example usages:
<programlisting>pkgs.foo.override { arg1 = val1; arg2 = val2; ... }</programlisting>
<programlisting>import pkgs.path { overlays = [ (self: super: {
foo = super.foo.override { barSupport = true ; };
})];}</programlisting>
<programlisting>mypkg = pkgs.callPackage ./mypkg.nix {
mydep = pkgs.mydep.override { ... };
}</programlisting>
</para>
<para>
In the first example, <varname>pkgs.foo</varname> is the result of a
function call with some default arguments, usually a derivation. Using
<varname>pkgs.foo.override</varname> will call the same function with the
given new arguments.
</para>
</section>
<section xml:id="sec-pkg-overrideAttrs">
<title>&lt;pkg&gt;.overrideAttrs</title>
<para>
The function <varname>overrideAttrs</varname> allows overriding the
attribute set passed to a <varname>stdenv.mkDerivation</varname> call,
producing a new derivation based on the original one. This function is
available on all derivations produced by the
<varname>stdenv.mkDerivation</varname> function, which is most packages in
the nixpkgs expression <varname>pkgs</varname>.
</para>
<para>
Example usage:
<programlisting>helloWithDebug = pkgs.hello.overrideAttrs (oldAttrs: rec {
separateDebugInfo = true;
});</programlisting>
</para>
<para>
In the above example, the <varname>separateDebugInfo</varname> attribute is
overridden to be true, thus building debug info for
<varname>helloWithDebug</varname>, while all other attributes will be
retained from the original <varname>hello</varname> package.
</para>
<para>
The argument <varname>oldAttrs</varname> is conventionally used to refer to
the attr set originally passed to <varname>stdenv.mkDerivation</varname>.
</para>
<note>
<para>
Note that <varname>separateDebugInfo</varname> is processed only by the
<varname>stdenv.mkDerivation</varname> function, not the generated, raw Nix
derivation. Thus, using <varname>overrideDerivation</varname> will not work
in this case, as it overrides only the attributes of the final derivation.
It is for this reason that <varname>overrideAttrs</varname> should be
preferred in (almost) all cases to <varname>overrideDerivation</varname>,
i.e. to allow using <varname>stdenv.mkDerivation</varname> to process input
arguments, as well as the fact that it is easier to use (you can use the
same attribute names you see in your Nix code, instead of the ones
generated, e.g. <varname>buildInputs</varname> vs
<varname>nativeBuildInputs</varname>), and involves less typing.
</para>
</note>
</section>
<section xml:id="sec-pkg-overrideDerivation">
<title>&lt;pkg&gt;.overrideDerivation</title>
<warning>
<para>
You should prefer <varname>overrideAttrs</varname> in almost all cases, see
its documentation for the reasons why.
<varname>overrideDerivation</varname> is not deprecated and will continue
to work, but is less nice to use and does not have as many abilities as
<varname>overrideAttrs</varname>.
</para>
</warning>
<warning>
<para>
Do not use this function in Nixpkgs as it evaluates a Derivation before
modifying it, which breaks package abstraction and removes error-checking
of function arguments. In addition, this evaluation-per-function
application incurs a performance penalty, which can become a problem if
many overrides are used. It is only intended for ad-hoc customisation, such
as in <filename>~/.config/nixpkgs/config.nix</filename>.
</para>
</warning>
<para>
The function <varname>overrideDerivation</varname> creates a new derivation
based on an existing one by overriding the original's attributes with the
attribute set produced by the specified function. This function is available
on all derivations defined using the <varname>makeOverridable</varname>
function. Most standard derivation-producing functions, such as
<varname>stdenv.mkDerivation</varname>, are defined using this function,
which means most packages in the nixpkgs expression,
<varname>pkgs</varname>, have this function.
</para>
<para>
Example usage:
<programlisting>mySed = pkgs.gnused.overrideDerivation (oldAttrs: {
name = "sed-4.2.2-pre";
src = fetchurl {
url = ftp://alpha.gnu.org/gnu/sed/sed-4.2.2-pre.tar.bz2;
sha256 = "11nq06d131y4wmf3drm0yk502d2xc6n5qy82cg88rb9nqd2lj41k";
};
patches = [];
});</programlisting>
</para>
<para>
In the above example, the <varname>name</varname>, <varname>src</varname>,
and <varname>patches</varname> of the derivation will be overridden, while
all other attributes will be retained from the original derivation.
</para>
<para>
The argument <varname>oldAttrs</varname> is used to refer to the attribute
set of the original derivation.
</para>
<note>
<para>
A package's attributes are evaluated <emphasis>before</emphasis> being modified by the
<varname>overrideDerivation</varname> function. For example, the
<varname>name</varname> attribute reference in <varname>url =
"mirror://gnu/hello/${name}.tar.gz";</varname> is filled-in <emphasis>before</emphasis> the
<varname>overrideDerivation</varname> function modifies the attribute set.
This means that overriding the <varname>name</varname> attribute, in this
example, <emphasis>will not</emphasis> change the value of the <varname>url</varname>
attribute. Instead, we need to override both the <varname>name</varname>
<emphasis>and</emphasis> <varname>url</varname> attributes.
</para>
</note>
</section>
<section xml:id="sec-lib-makeOverridable">
<title>lib.makeOverridable</title>
<para>
The function <varname>lib.makeOverridable</varname> is used to make the
result of a function easily customizable. This utility only makes sense for
functions that accept an argument set and return an attribute set.
</para>
<para>
Example usage:
<programlisting>f = { a, b }: { result = a+b; }
c = lib.makeOverridable f { a = 1; b = 2; }</programlisting>
</para>
<para>
The variable <varname>c</varname> is the value of the <varname>f</varname>
function applied with some default arguments. Hence the value of
<varname>c.result</varname> is <literal>3</literal>, in this example.
</para>
<para>
The variable <varname>c</varname> however also has some additional
functions, like <link linkend="sec-pkg-override">c.override</link> which can
be used to override the default arguments. In this example the value of
<varname>(c.override { a = 4; }).result</varname> is 6.
</para>
</section>
</section>

View File

@@ -1,26 +0,0 @@
<section xmlns="http://docbook.org/ns/docbook"
xmlns:xlink="http://www.w3.org/1999/xlink"
xmlns:xi="http://www.w3.org/2001/XInclude"
xml:id="sec-pkgs-mkShell">
<title>pkgs.mkShell</title>
<para>
<function>pkgs.mkShell</function> is a special kind of derivation that is
only useful when using it combined with <command>nix-shell</command>. It will
in fact fail to instantiate when invoked with <command>nix-build</command>.
</para>
<section xml:id="sec-pkgs-mkShell-usage">
<title>Usage</title>
<programlisting><![CDATA[
{ pkgs ? import <nixpkgs> {} }:
pkgs.mkShell {
# this will make all the build inputs from hello and gnutar
# available to the shell environment
inputsFrom = with pkgs; [ hello gnutar ];
buildInputs = [ pkgs.gnumake ];
}
]]></programlisting>
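<para>
Assuming the expression above is saved as <filename>shell.nix</filename>, the environment can be entered with plain <command>nix-shell</command>, which picks that file up by default:
<screen>$ nix-shell
[nix-shell:~]$ make --version</screen>
</para>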
</section>
</section>

784
doc/haskell-users-guide.md Normal file
View File

@@ -0,0 +1,784 @@
---
title: User's Guide for Haskell in Nixpkgs
author: Peter Simons
date: 2015-06-01
---
# User's Guide to the Haskell Infrastructure
## How to install Haskell packages
Nixpkgs distributes build instructions for all Haskell packages registered on
[Hackage](http://hackage.haskell.org/), but strangely enough normal Nix package
lookups don't seem to discover any of them, except for the default version of ghc, cabal-install, and stack:
$ nix-env -i alex
error: selector alex matches no derivations
$ nix-env -qa ghc
ghc-7.10.2
The Haskell package set is not registered in the top-level namespace because it
is *huge*. If all Haskell packages were visible to these commands, then
name-based search/install operations would be much slower than they are now. We
avoided that by keeping all Haskell-related packages in a separate attribute
set called `haskellPackages`, which the following command will list:
$ nix-env -f "<nixpkgs>" -qaP -A haskellPackages
haskellPackages.a50 a50-0.5
haskellPackages.abacate haskell-abacate-0.0.0.0
haskellPackages.abcBridge haskell-abcBridge-0.12
haskellPackages.afv afv-0.1.1
haskellPackages.alex alex-3.1.4
haskellPackages.Allure Allure-0.4.101.1
haskellPackages.alms alms-0.6.7
[... some 8000 entries omitted ...]
To install any of those packages into your profile, refer to them by their
attribute path (first column):
$ nix-env -f "<nixpkgs>" -iA haskellPackages.Allure ...
The attribute path of any Haskell package corresponds to the name of that
particular package on Hackage: the package `cabal-install` has the attribute
`haskellPackages.cabal-install`, and so on. (Actually, this convention causes
trouble with packages like `3dmodels` and `4Blocks`, because these names are
invalid identifiers in the Nix language. The issue of how to deal with these
rare corner cases is currently unresolved.)
Haskell packages whose Nix name (second column) begins with a `haskell-` prefix
are packages that provide a library whereas packages without that prefix
provide just executables. Libraries may provide executables too, though: the
package `haskell-pandoc`, for example, installs both a library and an
application. You can install and use Haskell executables just like any other
program in Nixpkgs, but using Haskell libraries for development is a bit
trickier and we'll address that subject in great detail in section [How to
create a development environment].
Attribute paths are deterministic inside of Nixpkgs, but the path necessary to
reach Nixpkgs varies from system to system. We dodged that problem by giving
`nix-env` an explicit `-f "<nixpkgs>"` parameter, but if you call `nix-env`
without that flag, then chances are the invocation fails:
$ nix-env -iA haskellPackages.cabal-install
error: attribute haskellPackages in selection path
haskellPackages.cabal-install not found
On NixOS, for example, Nixpkgs does *not* exist in the top-level namespace by
default. To figure out the proper attribute path, it's easiest to query for the
path of a well-known Nixpkgs package, e.g.:
$ nix-env -qaP coreutils
nixos.coreutils coreutils-8.23
If your system responds like that (most NixOS installations will), then the
attribute path to `haskellPackages` is `nixos.haskellPackages`. Thus, if you
want to use `nix-env` without giving an explicit `-f` flag, then that's the way
to do it:
$ nix-env -qaP -A nixos.haskellPackages
$ nix-env -iA nixos.haskellPackages.cabal-install
Our current default compiler is GHC 7.10.x and the `haskellPackages` set
contains packages built with that particular version. Nixpkgs contains the
latest major release of every GHC since 6.10.4, however, and there is a whole
family of package sets available that defines Hackage packages built with each
of those compilers, too:
$ nix-env -f "<nixpkgs>" -qaP -A haskell.packages.ghc6123
$ nix-env -f "<nixpkgs>" -qaP -A haskell.packages.ghc763
The name `haskellPackages` is really just a synonym for
`haskell.packages.ghc7102`, because we prefer that package set internally and
recommend it to our users as their default choice, but ultimately you are free
to compile your Haskell packages with any GHC version you please. The following
command displays the complete list of available compilers:
$ nix-env -f "<nixpkgs>" -qaP -A haskell.compiler
haskell.compiler.ghc6104 ghc-6.10.4
haskell.compiler.ghc6123 ghc-6.12.3
haskell.compiler.ghc704 ghc-7.0.4
haskell.compiler.ghc722 ghc-7.2.2
haskell.compiler.ghc742 ghc-7.4.2
haskell.compiler.ghc763 ghc-7.6.3
haskell.compiler.ghc784 ghc-7.8.4
haskell.compiler.ghc7102 ghc-7.10.2
haskell.compiler.ghcHEAD ghc-7.11.20150402
haskell.compiler.ghcNokinds ghc-nokinds-7.11.20150704
haskell.compiler.ghcjs ghcjs-0.1.0
haskell.compiler.jhc jhc-0.8.2
haskell.compiler.uhc uhc-1.1.9.0
We have no package sets for `jhc` or `uhc` yet, unfortunately, but for every
version of GHC listed above, there exists a package set based on that compiler.
Also, the attributes `haskell.compiler.ghcXYC` and
`haskell.packages.ghcXYC.ghc` are synonymous for the sake of convenience.
## How to create a development environment
### How to install a compiler
A simple development environment consists of a Haskell compiler and one or both
of the tools `cabal-install` and `stack`. We saw in section
[How to install Haskell packages] how you can install those programs into your
user profile:
$ nix-env -f "<nixpkgs>" -iA haskellPackages.ghc haskellPackages.cabal-install
Instead of the default package set `haskellPackages`, you can also use the more
precise name `haskell.compiler.ghc7102`, which has the advantage that it refers
to the same GHC version regardless of what Nixpkgs considers "default" at any
given time.
Once you've made those tools available in `$PATH`, it's possible to build
Hackage packages the same way people without access to Nix do it all the time:
$ cabal get lens-4.11 && cd lens-4.11
$ cabal install -j --dependencies-only
$ cabal configure
$ cabal build
If you enjoy working with Cabal sandboxes, then that's entirely possible too:
just execute the command
$ cabal sandbox init
before installing the required dependencies.
The `nix-shell` utility makes it easy to switch to a different compiler
version; just enter the Nix shell environment with the command
$ nix-shell -p haskell.compiler.ghc784
to bring GHC 7.8.4 into `$PATH`. Alternatively, you can use Stack instead of
`nix-shell` directly to select compiler versions and other build tools
per-project. It uses `nix-shell` under the hood when Nix support is turned on.
See [How to build a Haskell project using Stack].
If you're using `cabal-install`, re-running `cabal configure` inside the spawned
shell switches your build to use that compiler instead. If you're working on
a project that doesn't depend on any additional system libraries outside of GHC,
then it's even sufficient to just run the `cabal configure` command inside of
the shell:
$ nix-shell -p haskell.compiler.ghc784 --command "cabal configure"
Afterwards, all other commands like `cabal build` work just fine in any shell
environment, because the configure phase recorded the absolute paths to all
required tools like GHC in its build configuration inside of the `dist/`
directory. Please note, however, that `nix-collect-garbage` can break such an
environment because the Nix store paths created by `nix-shell` aren't "alive"
anymore once `nix-shell` has terminated. If you find that your Haskell builds
no longer work after garbage collection, then you'll have to re-run `cabal
configure` inside of a new `nix-shell` environment.
### How to install a compiler with libraries
GHC expects to find all installed libraries inside of its own `lib` directory.
This approach works fine on traditional Unix systems, but it doesn't work for
Nix, because GHC's store path is immutable once it's built. We cannot install
additional libraries into that location. As a consequence, our copies of GHC
don't know any packages except their own core libraries, like `base`,
`containers`, `Cabal`, etc.
We can register additional libraries to GHC, however, using a special build
function called `ghcWithPackages`. That function expects one argument: a
function that maps from an attribute set of Haskell packages to a list of
packages, which determines the libraries known to that particular version of
GHC. For example, the Nix expression `ghcWithPackages (pkgs: [pkgs.mtl])`
generates a copy of GHC that has the `mtl` library registered in addition to
its normal core packages:
$ nix-shell -p "haskellPackages.ghcWithPackages (pkgs: [pkgs.mtl])"
[nix-shell:~]$ ghc-pkg list mtl
/nix/store/zy79...-ghc-7.10.2/lib/ghc-7.10.2/package.conf.d:
mtl-2.2.1
This function allows users to define their own development environment by means
of an override. After adding the following snippet to `~/.nixpkgs/config.nix`,
{
packageOverrides = super: let self = super.pkgs; in
{
myHaskellEnv = self.haskell.packages.ghc7102.ghcWithPackages
(haskellPackages: with haskellPackages; [
# libraries
arrows async cgi criterion
# tools
cabal-install haskintex
]);
};
}
it's possible to install that compiler with `nix-env -f "<nixpkgs>" -iA
myHaskellEnv`. If you'd like to switch that development environment to a
different version of GHC, just replace the `ghc7102` bit in the previous
definition with the appropriate name. Of course, it's also possible to define
any number of these development environments! (You can't install two of them
into the same profile at the same time, though, because that would result in
file conflicts.)
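For instance, a second environment pinned to GHC 7.8.4 could be added right next to `myHaskellEnv` inside the `packageOverrides` shown above; `myHaskellEnv784` is just a hypothetical attribute name:

myHaskellEnv784 = self.haskell.packages.ghc784.ghcWithPackages
                    (haskellPackages: with haskellPackages; [
                      # same libraries and tools as above
                      arrows async cgi criterion cabal-install haskintex
                    ]);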
The generated `ghc` program is a wrapper script that redirects the real
GHC executable to use a new `lib` directory --- one that we specifically
constructed to contain all those packages the user requested:
$ cat $(type -p ghc)
#! /nix/store/xlxj...-bash-4.3-p33/bin/bash -e
export NIX_GHC=/nix/store/19sm...-ghc-7.10.2/bin/ghc
export NIX_GHCPKG=/nix/store/19sm...-ghc-7.10.2/bin/ghc-pkg
export NIX_GHC_DOCDIR=/nix/store/19sm...-ghc-7.10.2/share/doc/ghc/html
export NIX_GHC_LIBDIR=/nix/store/19sm...-ghc-7.10.2/lib/ghc-7.10.2
exec /nix/store/j50p...-ghc-7.10.2/bin/ghc "-B$NIX_GHC_LIBDIR" "$@"
The variables `$NIX_GHC`, `$NIX_GHCPKG`, etc. point to the *new* store path
`ghcWithPackages` constructed specifically for this environment. The last line
of the wrapper script then executes the real `ghc`, but passes the path to the
new `lib` directory using GHC's `-B` flag.
The purpose of those environment variables is to work around an impurity in the
popular [ghc-paths](http://hackage.haskell.org/package/ghc-paths) library. That
library promises to give its users access to GHC's installation paths. Only,
the library can't possibly know that path when it's compiled, because the path
GHC considers its own is determined only much later, when the user configures
it through `ghcWithPackages`. So we [patched
ghc-paths](https://github.com/NixOS/nixpkgs/blob/master/pkgs/development/haskell-modules/patches/ghc-paths-nix.patch)
to return the paths found in those environment variables at run-time rather
than trying to guess them at compile-time.
To make sure that mechanism works properly all the time, we recommend that you
set those variables to meaningful values in your shell environment, too, i.e.
by adding the following code to your `~/.bashrc`:
if type >/dev/null 2>&1 -p ghc; then
eval "$(egrep ^export "$(type -p ghc)")"
fi
If you are certain that you'll use only one GHC environment which is located in
your user profile, then you can use the following code, too, which has the
advantage that it doesn't contain any paths from the Nix store, i.e. those
settings always remain valid even if a `nix-env -u` operation updates the GHC
environment in your profile:
if [ -e ~/.nix-profile/bin/ghc ]; then
export NIX_GHC="$HOME/.nix-profile/bin/ghc"
export NIX_GHCPKG="$HOME/.nix-profile/bin/ghc-pkg"
export NIX_GHC_DOCDIR="$HOME/.nix-profile/share/doc/ghc/html"
export NIX_GHC_LIBDIR="$HOME/.nix-profile/lib/ghc-$($NIX_GHC --numeric-version)"
fi
### How to install a compiler with libraries, hoogle and documentation indexes
If you plan to use your environment for interactive programming, not just
compiling random Haskell code, you might want to replace `ghcWithPackages` in
all the listings above with `ghcWithHoogle`.
This environment generator not only produces an environment with GHC and all
the specified libraries, but also generates `hoogle` and `haddock` indexes
for all the packages, and provides a wrapper script around the `hoogle` binary that
uses all those things. A precise name for this thing would be
"`ghcWithPackagesAndHoogleAndDocumentationIndexes`", which is, regrettably, too
long and scary.
For example, installing the following environment
{
packageOverrides = super: let self = super.pkgs; in
{
myHaskellEnv = self.haskellPackages.ghcWithHoogle
(haskellPackages: with haskellPackages; [
# libraries
arrows async cgi criterion
# tools
cabal-install haskintex
]);
};
}
allows one to browse a module documentation index [not too dissimilar to
this](https://downloads.haskell.org/~ghc/latest/docs/html/libraries/index.html)
for all the specified packages and their dependencies by pointing a browser of
your choice at `~/.nix-profile/share/doc/hoogle/index.html` (or
`/run/current-system/sw/share/doc/hoogle/index.html` in case you put it in
`environment.systemPackages` in NixOS).
After you've marveled enough at that, try adding the following to your
`~/.ghc/ghci.conf`
:def hoogle \s -> return $ ":! hoogle search -cl --count=15 \"" ++ s ++ "\""
:def doc \s -> return $ ":! hoogle search -cl --info \"" ++ s ++ "\""
and test it by typing into `ghci`:
:hoogle a -> a
:doc a -> a
Be sure to note the links to `haddock` files in the output. With any modern and
properly configured terminal emulator you can just click those links to
navigate there.
Finally, you can run
hoogle server -p 8080
and navigate to http://localhost:8080/ for your own local
[Hoogle](https://www.haskell.org/hoogle/). Note, however, that Firefox and
possibly other browsers disallow navigation from `http:` to `file:` URIs for
security reasons, which might be quite an inconvenience. See [this
page](http://kb.mozillazine.org/Links_to_local_pages_do_not_work) for
workarounds.
### How to build a Haskell project using Stack
[Stack](http://haskellstack.org) is a popular build tool for Haskell projects.
It has first-class support for Nix. Stack can optionally use Nix to
automatically select the right version of GHC and other build tools to build,
test and execute apps in an existing project downloaded from somewhere on the
Internet. Pass the `--nix` flag to any `stack` command to do so, e.g.
$ git clone --recursive http://github.com/yesodweb/wai
$ cd wai
$ stack --nix build
If you want `stack` to use Nix by default, you can add a `nix` section to the
`stack.yaml` file, as explained in the [Stack documentation][stack-nix-doc]. For
example:
nix:
enable: true
packages: [pkgconfig zeromq zlib]
The example configuration snippet above tells Stack to create an ad hoc
environment for `nix-shell` as in the below section, in which the `pkgconfig`,
`zeromq` and `zlib` packages from Nixpkgs are available. All `stack` commands
will implicitly be executed inside this ad hoc environment.
Some projects have more sophisticated needs. For example, some ad hoc
environments might need to expose Nixpkgs packages compiled in a certain way, or
with extra environment variables. In these cases, you'll need a `shell` field
instead of `packages`:
nix:
enable: true
shell-file: shell.nix
For more on how to write a `shell.nix` file see the below section. You'll need
to express a derivation. Note that Nixpkgs ships with a convenience wrapper
function around `mkDerivation` called `haskell.lib.buildStackProject` to help you
create this derivation in exactly the way Stack expects. All of the same inputs
as `mkDerivation` can be provided. For example, to build a Stack project that
includes packages linking against a version of the R library compiled with
special options turned on:
with (import <nixpkgs> { });
let R = pkgs.R.override { enableStrictBarrier = true; };
in
haskell.lib.buildStackProject {
name = "HaskellR";
buildInputs = [ R zeromq zlib ];
}
[stack-nix-doc]: http://docs.haskellstack.org/en/stable/nix_integration.html
### How to create ad hoc environments for `nix-shell`
The easiest way to create an ad hoc development environment is to run
`nix-shell` with the appropriate GHC environment given on the command-line:
nix-shell -p "haskellPackages.ghcWithPackages (pkgs: with pkgs; [mtl pandoc])"
For more sophisticated use-cases, however, it's more convenient to save the
desired configuration in a file called `shell.nix` that looks like this:
{ nixpkgs ? import <nixpkgs> {}, compiler ? "ghc7102" }:
let
inherit (nixpkgs) pkgs;
ghc = pkgs.haskell.packages.${compiler}.ghcWithPackages (ps: with ps; [
monad-par mtl
]);
in
pkgs.stdenv.mkDerivation {
name = "my-haskell-env-0";
buildInputs = [ ghc ];
shellHook = "eval $(egrep ^export ${ghc}/bin/ghc)";
}
Now run `nix-shell` --- or even `nix-shell --pure` --- to enter a shell
environment that has the appropriate compiler in `$PATH`. If you use `--pure`,
then add all other packages that your development environment needs into the
`buildInputs` attribute. If you'd like to switch to a different compiler
version, then pass an appropriate `compiler` argument to the expression, i.e.
`nix-shell --argstr compiler ghc784`.
If you need such an environment because you'd like to compile a Hackage package
outside of Nix --- i.e. because you're hacking on the latest version from Git
---, then the package set provides suitable nix-shell environments for you
already! Every Haskell package has an `env` attribute that provides a shell
environment suitable for compiling that particular package. If you'd like to
hack the `lens` library, for example, then you just have to check out the
source code and enter the appropriate environment:
$ cabal get lens-4.11 && cd lens-4.11
Downloading lens-4.11...
Unpacking to lens-4.11/
$ nix-shell "<nixpkgs>" -A haskellPackages.lens.env
[nix-shell:/tmp/lens-4.11]$
At this point, you can run `cabal configure`, `cabal build`, and all the other
development commands. Note that you need `cabal-install` installed in your
`$PATH` already to use it here --- the `nix-shell` environment does not provide
it.
## How to create Nix builds for your own private Haskell packages
If your own Haskell packages have build instructions for Cabal, then you can
convert those automatically into build instructions for Nix using the
`cabal2nix` utility, which you can install into your profile by running
`nix-env -i cabal2nix`.
### How to build a stand-alone project
For example, let's assume that you're working on a private project called
`foo`. To generate a Nix build expression for it, change into the project's
top-level directory and run the command:
$ cabal2nix . >foo.nix
Then write the following snippet into a file called `default.nix`:
{ nixpkgs ? import <nixpkgs> {}, compiler ? "ghc7102" }:
nixpkgs.pkgs.haskell.packages.${compiler}.callPackage ./foo.nix { }
Finally, store the following code in a file called `shell.nix`:
{ nixpkgs ? import <nixpkgs> {}, compiler ? "ghc7102" }:
(import ./default.nix { inherit nixpkgs compiler; }).env
At this point, you can run `nix-build` to have Nix compile your project and
install it into a Nix store path. After `nix-build` returns, the local directory
will contain a symlink called `result` that points to that location. Of
course, passing the flag `--argstr compiler ghc763` allows switching the build
to any version of GHC currently supported.
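For example, the following builds the same project against GHC 7.6.3:

$ nix-build --argstr compiler ghc763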
Furthermore, you can call `nix-shell` to enter an interactive development
environment in which you can use `cabal configure` and `cabal build` to develop
your code. That environment will automatically contain a proper GHC derivation
with all the required libraries registered as well as all the system-level
libraries your package might need.
If your package does not depend on any system-level libraries, then it's
sufficient to run
$ nix-shell --command "cabal configure"
once to set up your build. `cabal-install` determines the absolute paths to all
resources required for the build and writes them into a config file in the
`dist/` directory. Once that's done, you can run `cabal build` and any other
command for that project even outside of the `nix-shell` environment. This
feature is particularly nice for those of us who like to edit their code with
an IDE, like Emacs' `haskell-mode`, because it's not necessary to start Emacs
inside of nix-shell just to make it find out the necessary settings for
building the project; `cabal-install` has already done that for us.
If you want to do some quick-and-dirty hacking and don't want to bother setting
up a `default.nix` and `shell.nix` file manually, then you can use the
`--shell` flag offered by `cabal2nix` to have it generate a stand-alone
`nix-shell` environment for you. With that feature, running
$ cabal2nix --shell . >shell.nix
$ nix-shell --command "cabal configure"
is usually enough to set up a build environment for any given Haskell package.
You can even use that generated file to run `nix-build`, too:
$ nix-build shell.nix
### How to build projects that depend on each other
If you have multiple private Haskell packages that depend on each other, then
you'll have to register those packages in the Nixpkgs set to make them visible
for the dependency resolution performed by `callPackage`. First of all, change
into each of your projects' top-level directories and generate a `default.nix`
file with `cabal2nix`:
$ cd ~/src/foo && cabal2nix . >default.nix
$ cd ~/src/bar && cabal2nix . >default.nix
Then edit your `~/.nixpkgs/config.nix` file to register those builds in the
default Haskell package set:
{
packageOverrides = super: let self = super.pkgs; in
{
haskellPackages = super.haskellPackages.override {
overrides = self: super: {
foo = self.callPackage ../src/foo {};
bar = self.callPackage ../src/bar {};
};
};
};
}
Once that's accomplished, `nix-env -f "<nixpkgs>" -qA haskellPackages` will
show your packages like any other package from Hackage, and you can build them
$ nix-build "<nixpkgs>" -A haskellPackages.foo
or enter an interactive shell environment suitable for building them:
$ nix-shell "<nixpkgs>" -A haskellPackages.bar.env
## Miscellaneous Topics
### How to build with profiling enabled
Every Haskell package set takes a function called `overrides` that you can use
to manipulate the package set as much as you please. One useful application of this
feature is to replace the default `mkDerivation` function with one that enables
library profiling for all packages. To accomplish that, add the
following snippet to your `~/.nixpkgs/config.nix` file:
{
packageOverrides = super: let self = super.pkgs; in
{
profiledHaskellPackages = self.haskellPackages.override {
overrides = self: super: {
mkDerivation = args: super.mkDerivation (args // {
enableLibraryProfiling = true;
});
};
};
};
}
Then, replace instances of `haskellPackages` in the `cabal2nix`-generated
`default.nix` or `shell.nix` files with `profiledHaskellPackages`.
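For example, the `default.nix` from the stand-alone project above might then read as follows (a sketch that uses the default compiler of the profiled package set):

{ nixpkgs ? import <nixpkgs> {} }:
nixpkgs.pkgs.profiledHaskellPackages.callPackage ./foo.nix { }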
### How to override package versions in a compiler-specific package set
Nixpkgs provides the latest version of
[`ghc-events`](http://hackage.haskell.org/package/ghc-events), which is 0.4.4.0
at the time of this writing. This is fine for users of GHC 7.10.x, but GHC
7.8.4 cannot compile that binary. Now, one way to solve that problem is to
register an older version of `ghc-events` in the 7.8.x-specific package set.
The first step is to generate Nix build instructions with `cabal2nix`:
$ cabal2nix cabal://ghc-events-0.4.3.0 >~/.nixpkgs/ghc-events-0.4.3.0.nix
Then add the override in `~/.nixpkgs/config.nix`:
{
packageOverrides = super: let self = super.pkgs; in
{
haskell = super.haskell // {
packages = super.haskell.packages // {
ghc784 = super.haskell.packages.ghc784.override {
overrides = self: super: {
ghc-events = self.callPackage ./ghc-events-0.4.3.0.nix {};
};
};
};
};
};
}
This code is a little crazy, no doubt, but it's necessary because the intuitive
version
haskell.packages.ghc784 = super.haskell.packages.ghc784.override {
overrides = self: super: {
ghc-events = self.callPackage ./ghc-events-0.4.3.0.nix {};
};
};
doesn't do what we want it to: that code replaces the `haskell` package set in
Nixpkgs with one that contains only one entry, `packages`, which in turn contains
only one entry, `ghc784`. This override loses the `haskell.compiler` set, and it
loses the `haskell.packages.ghcXYZ` sets for all compilers but GHC 7.8.4. To
avoid that problem, we have to perform the convoluted little dance from above,
iterating over each level of the hierarchy.
Once it's accomplished, however, we can install a variant of `ghc-events`
that's compiled with GHC 7.8.4:
nix-env -f "<nixpkgs>" -iA haskell.packages.ghc784.ghc-events
Unfortunately, it turns out that this build fails again while executing the
test suite! Apparently, the release archive on Hackage is missing some data
files that the test suite requires, so we cannot run it and have to disable it.
We accomplish that by re-generating the Nix expression with the `--no-check` flag:
$ cabal2nix --no-check cabal://ghc-events-0.4.3.0 >~/.nixpkgs/ghc-events-0.4.3.0.nix
Now the build succeeds.
Of course, in the concrete example of `ghc-events` this whole exercise is not
an ideal solution, because `ghc-events` can analyze the output emitted by any
version of GHC later than 6.12 regardless of the compiler version that was used
to build the `ghc-events` executable, so strictly speaking there's no reason to
prefer one built with GHC 7.8.x in the first place. However, for users who
cannot use GHC 7.10.x at all for some reason, the approach of downgrading to an
older version might be useful.
### How to recover from GHC's infamous non-deterministic library ID bug
GHC and distributed build farms don't get along well:
https://ghc.haskell.org/trac/ghc/ticket/4012
When you see an error like this one
package foo-0.7.1.0 is broken due to missing package
text-1.2.0.4-98506efb1b9ada233bb5c2b2db516d91
then you have to download and re-install `foo` and all its dependents from
scratch:
# nix-store -q --referrers /nix/store/*-haskell-text-1.2.0.4 \
| xargs -L 1 nix-store --repair-path --option binary-caches http://hydra.nixos.org
If you're using additional Hydra servers other than `hydra.nixos.org`, then it
might be necessary to purge the local caches that store data from those
machines to disable these binary channels for the duration of the previous
command, i.e. by running:
rm /nix/var/nix/binary-cache-v3.sqlite
rm /nix/var/nix/manifests/*
rm /nix/var/nix/channel-cache/*
### Builds on Darwin fail with `math.h` not found
Users of GHC on Darwin have occasionally reported that builds fail, because the
compiler complains about a missing include file:
fatal error: 'math.h' file not found
The issue has been discussed at length in [ticket
6390](https://github.com/NixOS/nixpkgs/issues/6390), and so far no good
solution has been proposed. As a work-around, users who run into this problem
can configure the environment variables
export NIX_CFLAGS_COMPILE="-idirafter /usr/include"
export NIX_CFLAGS_LINK="-L/usr/lib"
in their `~/.bashrc` file to avoid the compiler error.
### Builds using Stack complain about missing system libraries
-- While building package zlib-0.5.4.2 using:
runhaskell -package=Cabal-1.22.4.0 -clear-package-db [... lots of flags ...]
Process exited with code: ExitFailure 1
Logs have been written to: /home/foo/src/stack-ide/.stack-work/logs/zlib-0.5.4.2.log
Configuring zlib-0.5.4.2...
Setup.hs: Missing dependency on a foreign library:
* Missing (or bad) header file: zlib.h
This problem can usually be solved by installing the system package that
provides this library (you may need the "-dev" version). If the library is
already installed but in a non-standard location then you can use the flags
--extra-include-dirs= and --extra-lib-dirs= to specify where it is.
If the header file does exist, it may contain errors that are caught by the C
compiler at the preprocessing stage. In this case you can re-run configure
with the verbosity flag -v3 to see the error messages.
When you run the build inside of the nix-shell environment, the system
is configured to find libz.so without any special flags -- the compiler
and linker "just know" how to find it. Consequently, Cabal won't record
any search paths for libz.so in the package description, which means
that the package works fine inside of nix-shell, but once you leave the
shell the shared object can no longer be found. That issue is by no
means specific to Stack: you'll have that problem with any other
Haskell package that's built inside of nix-shell but run outside of that
environment.
You can remedy this issue in several ways. The easiest is to add a `nix` section
to the `stack.yaml` like the following:
nix:
enable: true
packages: [ zlib ]
Stack's Nix support knows to add `${zlib}/lib` and `${zlib}/include` as
`--extra-lib-dirs` and `--extra-include-dirs` options, respectively. Alternatively, you
can achieve the same effect by hand. First of all, run
$ nix-build --no-out-link "<nixpkgs>" -A zlib
/nix/store/alsvwzkiw4b7ip38l4nlfjijdvg3fvzn-zlib-1.2.8
to find out the store path of the system's zlib library. Now, you can
1) add that path (plus a "/lib" suffix) to your $LD_LIBRARY_PATH
environment variable to make sure your system linker finds libz.so
automatically. It's not a pretty solution, but it will work.
2) As a variant of (1), you can also install any number of system
libraries into your user's profile (or some other profile) and point
$LD_LIBRARY_PATH to that profile instead, so that you don't have to
list dozens of those store paths all over the place.
3) The solution I prefer is to call stack with an appropriate
--extra-lib-dirs flag like so:
$ stack --extra-lib-dirs=/nix/store/alsvwzkiw4b7ip38l4nlfjijdvg3fvzn-zlib-1.2.8/lib build
Typically, you'll need --extra-include-dirs as well. It's possible
to add those flags to the project's "stack.yaml" or your user's
global "~/.stack/global/stack.yaml" file so that you don't have to
specify them manually every time. But again, you're likely better off using
Stack's Nix support instead.
The same thing applies to `cabal configure`, of course, if you're
building with `cabal-install` instead of Stack.
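For example (a sketch; substitute the store path that `nix-build` printed on your system):

$ cabal configure --extra-lib-dirs=/nix/store/alsvwzkiw4b7ip38l4nlfjijdvg3fvzn-zlib-1.2.8/lib \
    --extra-include-dirs=/nix/store/alsvwzkiw4b7ip38l4nlfjijdvg3fvzn-zlib-1.2.8/include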
### Creating statically linked binaries
There are two levels of static linking. The first option is to configure the
build with the Cabal flag `--disable-executable-dynamic`. In Nix expressions,
this can be achieved by setting the attribute:
enableSharedExecutables = false;
That gives you a binary with statically linked Haskell libraries and
dynamically linked system libraries.
To link both Haskell libraries and system libraries statically, the additional
flags `--ghc-option=-optl=-static --ghc-option=-optl=-pthread` need to be used.
In Nix, this is accomplished with:
configureFlags = [ "--ghc-option=-optl=-static" "--ghc-option=-optl=-pthread" ];
It's important to realize, however, that most system libraries in Nix are built
as shared libraries only, i.e. there is just no static library available that
Cabal could link!
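As a concrete sketch, assuming that `haskell.lib.overrideCabal` is available in your version of Nixpkgs and using the Hackage package `hello` purely as an example, both settings can be applied to an existing package from your `~/.nixpkgs/config.nix`:

{
  packageOverrides = super: let self = super.pkgs; in
  {
    # sketch: a statically linked variant of haskellPackages.hello
    staticHello = self.haskell.lib.overrideCabal self.haskellPackages.hello (drv: {
      enableSharedExecutables = false;
      configureFlags = (drv.configureFlags or []) ++
        [ "--ghc-option=-optl=-static" "--ghc-option=-optl=-pthread" ];
    });
  };
}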
## Other resources
- The Youtube video [Nix Loves Haskell](https://www.youtube.com/watch?v=BsBhi_r-OeE)
provides an introduction to Haskell NG aimed at beginners. The slides are
available at http://cryp.to/nixos-meetup-3-slides.pdf and also -- in a form
ready for cut & paste -- at
https://github.com/NixOS/cabal2nix/blob/master/doc/nixos-meetup-3-slides.md.
- Another Youtube video is [Escaping Cabal Hell with Nix](https://www.youtube.com/watch?v=mQd3s57n_2Y),
which discusses the subject of Haskell development with Nix but also provides
a basic introduction to Nix, i.e. it's suitable for viewers with
almost no prior Nix experience.
- Oliver Charles wrote a very nice [Tutorial how to develop Haskell packages with Nix](http://wiki.ocharles.org.uk/Nix).
- The *Journey into the Haskell NG infrastructure* series of postings
describe the new Haskell infrastructure in great detail:
- [Part 1](http://lists.science.uu.nl/pipermail/nix-dev/2015-January/015591.html)
explains the differences between the old and the new code and gives
instructions on how to migrate to the new setup.
- [Part 2](http://lists.science.uu.nl/pipermail/nix-dev/2015-January/015608.html)
looks in-depth at how to tweak and configure your setup by means of
overrides.
- [Part 3](http://lists.science.uu.nl/pipermail/nix-dev/2015-April/016912.html)
describes the infrastructure that keeps the Haskell package set in Nixpkgs
up-to-date.


@@ -1,51 +0,0 @@
---
title: Introduction
author: Frederik Rietdijk
date: 2015-11-25
---
# Introduction
The Nix Packages collection (Nixpkgs) is a set of thousands of packages for the
[Nix package manager](http://nixos.org/nix/), released under a
[permissive MIT/X11 license](https://github.com/NixOS/nixpkgs/blob/master/COPYING).
Packages are available for several platforms, and can be used with the Nix
package manager on most GNU/Linux distributions as well as NixOS.
This manual primarily describes how to write packages for the Nix Packages collection
(Nixpkgs). Thus its mainly for packagers and developers who want to add packages to
Nixpkgs. If you like to learn more about the Nix package manager and the Nix
expression language, then you are kindly referred to the [Nix manual](http://nixos.org/nix/manual/).
## Overview of Nixpkgs
Nix expressions describe how to build packages from source and are collected in
the [nixpkgs repository](https://github.com/NixOS/nixpkgs). Also included in the
collection are Nix expressions for
[NixOS modules](http://nixos.org/nixos/manual/index.html#sec-writing-modules).
With these expressions the Nix package manager can build binary packages.
Packages, including the Nix packages collection, are distributed through
[channels](http://nixos.org/nix/manual/#sec-channels). The collection is
distributed for users of Nix on non-NixOS distributions through the channel
`nixpkgs`. Users of NixOS generally use one of the `nixos-*` channels, e.g.
`nixos-16.03`, which includes all packages and modules for the stable NixOS
16.03. Stable NixOS releases are generally only given
security updates. More up to date packages and modules are available via the
`nixos-unstable` channel.
Both `nixos-unstable` and `nixpkgs` follow the `master` branch of the Nixpkgs
repository, although both do lag the `master` branch by generally
[a couple of days](http://howoldis.herokuapp.com/). Updates to a channel are
distributed as soon as all tests for that channel pass, e.g.
[this table](http://hydra.nixos.org/job/nixpkgs/trunk/unstable#tabs-constituents)
shows the status of tests for the `nixpkgs` channel.
The tests are conducted by a cluster called [Hydra](http://nixos.org/hydra/),
which also builds binary packages from the Nix expressions in Nixpkgs for
`x86_64-linux`, `i686-linux` and `x86_64-darwin`.
The binaries are made available via a [binary cache](https://cache.nixos.org).
The current Nix expressions of the channels are available in the
[`nixpkgs-channels`](https://github.com/NixOS/nixpkgs-channels) repository,
which has branches corresponding to the available channels.

doc/introduction.md Normal file

@@ -0,0 +1,48 @@
---
title: Introduction
author: Frederik Rietdijk
date: 2015-11-25
---
# Introduction
The Nix Packages collection (Nixpkgs) is a set of over 30,000 packages for the
[Nix package manager](http://nixos.org/nix/), released under a [permissive MIT/X11 license](https://github.com/NixOS/nixpkgs/blob/master/COPYING).
Packages are available for several architectures, and can be used with the Nix package manager
on most GNU/Linux distributions as well as NixOS.
This manual describes how to write packages for the Nix Packages collection
(Nixpkgs). Thus it's for packagers and developers who want to add packages to
Nixpkgs. If you'd like to learn more about the Nix package manager and the Nix
expression language, then you are kindly referred to the [Nix manual](http://nixos.org/nix/manual/).
## Overview of Nixpkgs
Nix expressions describe how to build packages from source and are collected in
the [nixpkgs repository](https://github.com/NixOS/nixpkgs). Also included in the
collection are Nix expressions for [NixOS modules](http://nixos.org/nixos/manual/index.html#sec-writing-modules). With
these expressions the Nix package manager can build binary packages.
Packages, including the Nix packages collection, are distributed through
[channels](http://nixos.org/nix/manual/#sec-channels). The collection is
distributed for users of Nix on non-NixOS distributions through the channel
`nixpkgs`. Users of NixOS generally use one of the `nixos-*` channels, e.g.
`nixos-15.09`, which includes all packages and modules for the stable NixOS
15.09. The channels of the stable NixOS releases are generally only given
security updates. More up to date packages and modules are available via the
`nixos-unstable` channel.
Both `nixos-unstable` and `nixpkgs` follow the `master` branch of the Nixpkgs
repository, although both do lag the `master` branch by generally [a couple of days](http://howoldis.herokuapp.com/). Updates to a channel are distributed as
soon as all tests for that channel pass, e.g. [this table](http://hydra.nixos.org/job/nixpkgs/trunk/unstable#tabs-constituents)
shows the status of tests for the `nixpkgs` channel.
The tests are conducted by a cluster called [Hydra](http://nixos.org/hydra/),
which also builds binary packages from the Nix expressions in Nixpkgs. As soon
as a channel is updated, the binaries are made available via a [binary cache](https://cache.nixos.org). Until the channel updates, binaries that have
already been built are available via [Hydra's binary cache](https://hydra.nixos.org).
The current Nix expressions of the channels are available in the
[`nixpkgs-channels`](https://github.com/NixOS/nixpkgs-channels) repository,
which has branches corresponding to the available channels. There is also the
Nixpkgs Monitor which keeps track of updates and security vulnerabilities.


@@ -1,528 +0,0 @@
<section xmlns="http://docbook.org/ns/docbook"
xmlns:xlink="http://www.w3.org/1999/xlink"
xml:id="sec-beam">
<title>BEAM Languages (Erlang, Elixir &amp; LFE)</title>
<section xml:id="beam-introduction">
<title>Introduction</title>
<para>
In this document and related Nix expressions, we use the term,
<emphasis>BEAM</emphasis>, to describe the environment. BEAM is the name of
the Erlang Virtual Machine and, as far as we're concerned, from a packaging
perspective, all languages that run on the BEAM are interchangeable. That
which varies, like the build system, is transparent to users of any given
BEAM package, so we make no distinction.
</para>
</section>
<section xml:id="beam-structure">
<title>Structure</title>
<para>
All BEAM-related expressions are available via the top-level
<literal>beam</literal> attribute, which includes:
</para>
<itemizedlist>
<listitem>
<para>
<literal>interpreters</literal>: a set of compilers running on the BEAM,
including multiple Erlang/OTP versions
(<literal>beam.interpreters.erlangR19</literal>, etc), Elixir
(<literal>beam.interpreters.elixir</literal>) and LFE
(<literal>beam.interpreters.lfe</literal>).
</para>
</listitem>
<listitem>
<para>
<literal>packages</literal>: a set of package sets, each compiled with a
specific Erlang/OTP version, e.g.
<literal>beam.packages.erlangR19</literal>.
</para>
</listitem>
</itemizedlist>
<para>
The default Erlang compiler, defined by
<literal>beam.interpreters.erlang</literal>, is aliased as
<literal>erlang</literal>. The default BEAM package set is defined by
<literal>beam.packages.erlang</literal> and aliased at the top level as
<literal>beamPackages</literal>.
</para>
<para>
To create a package set built with a custom Erlang version, use the lambda,
<literal>beam.packagesWith</literal>, which accepts an Erlang/OTP derivation
and produces a package set similar to
<literal>beam.packages.erlang</literal>.
</para>
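  <para>
    For example, the following defines a package set built with Erlang/OTP R19
    (a sketch; <literal>myBeamPackages</literal> is a hypothetical attribute
    name, and any derivation from <literal>beam.interpreters</literal> may be
    passed in):
  </para>
<programlisting>
myBeamPackages = pkgs.beam.packagesWith pkgs.beam.interpreters.erlangR19;
</programlisting>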
<para>
Many Erlang/OTP distributions available in
<literal>beam.interpreters</literal> have versions with ODBC and/or Java
enabled. For example, there's
<literal>beam.interpreters.erlangR19_odbc_javac</literal>, which corresponds
to <literal>beam.interpreters.erlangR19</literal>.
</para>
<para xml:id="erlang-call-package">
We also provide the lambda,
<literal>beam.packages.erlang.callPackage</literal>, which simplifies
writing BEAM package definitions by injecting all packages from
<literal>beam.packages.erlang</literal> into the top-level context.
</para>
</section>
<section xml:id="build-tools">
<title>Build Tools</title>
<section xml:id="build-tools-rebar3">
<title>Rebar3</title>
<para>
By default, Rebar3 wants to manage its own dependencies. This is perfectly
acceptable in the normal, non-Nix setup, but in the Nix world, it is not.
To rectify this, we provide two versions of Rebar3:
<itemizedlist>
<listitem>
<para>
<literal>rebar3</literal>: patched to remove the ability to download
anything. When not running it via <literal>nix-shell</literal> or
<literal>nix-build</literal>, it's probably not going to work as
desired.
</para>
</listitem>
<listitem>
<para>
<literal>rebar3-open</literal>: the normal, unmodified Rebar3. It should
work exactly as would any other version of Rebar3. Any Erlang package
should rely on <literal>rebar3</literal> instead. See
<xref
linkend="rebar3-packages"/>.
</para>
</listitem>
</itemizedlist>
</para>
</section>
<section xml:id="build-tools-other">
<title>Mix &amp; Erlang.mk</title>
<para>
Both Mix and Erlang.mk work exactly as expected. There is a bootstrap
process that needs to be run for both, however, which is supported by the
<literal>buildMix</literal> and <literal>buildErlangMk</literal>
derivations, respectively.
</para>
</section>
</section>
<section xml:id="how-to-install-beam-packages">
<title>How to Install BEAM Packages</title>
<para>
BEAM packages are not registered at the top level, simply because they are
not relevant to the vast majority of Nix users. They are installable using
the <literal>beam.packages.erlang</literal> attribute set (aliased as
<literal>beamPackages</literal>), which points to packages built by the
default Erlang/OTP version in Nixpkgs, as defined by
<literal>beam.interpreters.erlang</literal>. To list the available packages
in <literal>beamPackages</literal>, use the following command:
</para>
<programlisting>
$ nix-env -f &quot;&lt;nixpkgs&gt;&quot; -qaP -A beamPackages
beamPackages.esqlite esqlite-0.2.1
beamPackages.goldrush goldrush-0.1.7
beamPackages.ibrowse ibrowse-4.2.2
beamPackages.jiffy jiffy-0.14.5
beamPackages.lager lager-3.0.2
beamPackages.meck meck-0.8.3
beamPackages.rebar3-pc pc-1.1.0
</programlisting>
<para>
To install any of those packages into your profile, refer to them by their
attribute path (first column):
</para>
<programlisting>
$ nix-env -f &quot;&lt;nixpkgs&gt;&quot; -iA beamPackages.ibrowse
</programlisting>
<para>
The attribute path of any BEAM package corresponds to the name of that
particular package in <link xlink:href="https://hex.pm">Hex</link> or its
OTP Application/Release name.
</para>
</section>
<section xml:id="packaging-beam-applications">
<title>Packaging BEAM Applications</title>
<section xml:id="packaging-erlang-applications">
<title>Erlang Applications</title>
<section xml:id="rebar3-packages">
<title>Rebar3 Packages</title>
<para>
The Nix function, <literal>buildRebar3</literal>, defined in
<literal>beam.packages.erlang.buildRebar3</literal> and aliased at the top
level, can be used to build a derivation that understands how to build a
Rebar3 project. For example, we can build
<link
xlink:href="https://github.com/erlang-nix/hex2nix">hex2nix</link>
as follows:
</para>
<programlisting>
{ stdenv, fetchFromGitHub, buildRebar3, ibrowse, jsx, erlware_commons }:
buildRebar3 rec {
name = "hex2nix";
version = "0.0.1";
src = fetchFromGitHub {
owner = "ericbmerritt";
repo = "hex2nix";
rev = "${version}";
sha256 = "1w7xjidz1l5yjmhlplfx7kphmnpvqm67w99hd2m7kdixwdxq0zqg";
};
beamDeps = [ ibrowse jsx erlware_commons ];
}
</programlisting>
<para>
Such derivations are callable with
<literal>beam.packages.erlang.callPackage</literal> (see
<xref
linkend="erlang-call-package"/>). To call this package using
the normal <literal>callPackage</literal>, refer to dependency packages
via <literal>beamPackages</literal>, e.g.
<literal>beamPackages.ibrowse</literal>.
</para>
<para>
Notably, <literal>buildRebar3</literal> includes
<literal>beamDeps</literal>, while <literal>stdenv.mkDerivation</literal>
does not. BEAM dependencies added there will be correctly handled by the
system.
</para>
<para>
If a package needs to compile native code via Rebar3's port compilation
mechanism, add <literal>compilePort = true;</literal> to the derivation.
</para>
</section>
<section xml:id="erlang-mk-packages">
<title>Erlang.mk Packages</title>
<para>
Erlang.mk functions similarly to Rebar3, except we use
<literal>buildErlangMk</literal> instead of
<literal>buildRebar3</literal>.
</para>
<programlisting>
{ stdenv, buildErlangMk, fetchHex, cowlib, ranch }:
buildErlangMk {
name = "cowboy";
version = "1.0.4";
src = fetchHex {
pkg = "cowboy";
version = "1.0.4";
sha256 = "6a0edee96885fae3a8dd0ac1f333538a42e807db638a9453064ccfdaa6b9fdac";
};
beamDeps = [ cowlib ranch ];
meta = {
description = ''
Small, fast, modular HTTP server written in Erlang
'';
license = stdenv.lib.licenses.isc;
homepage = https://github.com/ninenines/cowboy;
};
}
</programlisting>
</section>
<section xml:id="mix-packages">
<title>Mix Packages</title>
<para>
Mix functions similarly to Rebar3, except we use
<literal>buildMix</literal> instead of <literal>buildRebar3</literal>.
</para>
<programlisting>
{ stdenv, buildMix, fetchHex, plug, absinthe }:
buildMix {
name = "absinthe_plug";
version = "1.0.0";
src = fetchHex {
pkg = "absinthe_plug";
version = "1.0.0";
sha256 = "08459823fe1fd4f0325a8bf0c937a4520583a5a26d73b193040ab30a1dfc0b33";
};
beamDeps = [ plug absinthe ];
meta = {
description = ''
A plug for Absinthe, an experimental GraphQL toolkit
'';
license = stdenv.lib.licenses.bsd3;
homepage = https://github.com/CargoSense/absinthe_plug;
};
}
</programlisting>
<para>
Alternatively, we can use <literal>buildHex</literal> as a shortcut:
</para>
<programlisting>
{ stdenv, buildHex, buildMix, plug, absinthe }:
buildHex {
name = "absinthe_plug";
version = "1.0.0";
sha256 = "08459823fe1fd4f0325a8bf0c937a4520583a5a26d73b193040ab30a1dfc0b33";
builder = buildMix;
beamDeps = [ plug absinthe ];
meta = {
description = ''
A plug for Absinthe, an experimental GraphQL toolkit
'';
license = stdenv.lib.licenses.bsd3;
homepage = https://github.com/CargoSense/absinthe_plug;
};
}
</programlisting>
</section>
</section>
</section>
<section xml:id="how-to-develop">
<title>How to Develop</title>
<section xml:id="accessing-an-environment">
<title>Accessing an Environment</title>
<para>
Often, we simply want to access a valid environment that contains a
specific package and its dependencies. We can accomplish that with the
<literal>env</literal> attribute of a derivation. For example, let's say we
want to access an Erlang REPL with <literal>ibrowse</literal> loaded up. We
could do the following:
</para>
<programlisting>
$ nix-shell -A beamPackages.ibrowse.env --run "erl"
Erlang/OTP 18 [erts-7.0] [source] [64-bit] [smp:4:4] [async-threads:10] [hipe] [kernel-poll:false]
Eshell V7.0 (abort with ^G)
1> m(ibrowse).
Module: ibrowse
MD5: 3b3e0137d0cbb28070146978a3392945
Compiled: January 10 2016, 23:34
Object file: /nix/store/g1rlf65rdgjs4abbyj4grp37ry7ywivj-ibrowse-4.2.2/lib/erlang/lib/ibrowse-4.2.2/ebin/ibrowse.beam
Compiler options: [{outdir,"/tmp/nix-build-ibrowse-4.2.2.drv-0/hex-source-ibrowse-4.2.2/_build/default/lib/ibrowse/ebin"},
debug_info,debug_info,nowarn_shadow_vars,
warn_unused_import,warn_unused_vars,warnings_as_errors,
{i,"/tmp/nix-build-ibrowse-4.2.2.drv-0/hex-source-ibrowse-4.2.2/_build/default/lib/ibrowse/include"}]
Exports:
add_config/1 send_req_direct/7
all_trace_off/0 set_dest/3
code_change/3 set_max_attempts/3
get_config_value/1 set_max_pipeline_size/3
get_config_value/2 set_max_sessions/3
get_metrics/0 show_dest_status/0
get_metrics/2 show_dest_status/1
handle_call/3 show_dest_status/2
handle_cast/2 spawn_link_worker_process/1
handle_info/2 spawn_link_worker_process/2
init/1 spawn_worker_process/1
module_info/0 spawn_worker_process/2
module_info/1 start/0
rescan_config/0 start_link/0
rescan_config/1 stop/0
send_req/3 stop_worker_process/1
send_req/4 stream_close/1
send_req/5 stream_next/1
send_req/6 terminate/2
send_req_direct/4 trace_off/0
send_req_direct/5 trace_off/2
send_req_direct/6 trace_on/0
trace_on/2
ok
2>
</programlisting>
<para>
Notice the <literal>-A beamPackages.ibrowse.env</literal>. That is the key
to this functionality.
</para>
</section>
<section xml:id="creating-a-shell">
<title>Creating a Shell</title>
<para>
Getting access to an environment often isn't enough to do real development.
Usually, we need to create a <literal>shell.nix</literal> file and do our
development inside of the environment specified therein. This file looks a
lot like the packaging described above, except that <literal>src</literal>
points to the project root and we call the package directly.
</para>
<programlisting>
{ pkgs ? import &lt;nixpkgs&gt; {} }:
with pkgs;
let
f = { buildRebar3, ibrowse, jsx, erlware_commons }:
buildRebar3 {
name = "hex2nix";
version = "0.1.0";
src = ./.;
beamDeps = [ ibrowse jsx erlware_commons ];
};
drv = beamPackages.callPackage f {};
in
drv
</programlisting>
<section xml:id="building-in-a-shell">
<title>Building in a Shell (for Mix Projects)</title>
<para>
We can leverage the build phases provided by the derivation, irrespective of
which build derivation is used, by calling the commands themselves, for example from a Makefile:
</para>
<programlisting>
# =============================================================================
# Variables
# =============================================================================
NIX_TEMPLATES := "$(CURDIR)/nix-templates"
TARGET := "$(PREFIX)"
PROJECT_NAME := thorndyke
NIXPKGS=../nixpkgs
NIX_PATH=nixpkgs=$(NIXPKGS)
NIX_SHELL=nix-shell -I "$(NIX_PATH)" --pure
# =============================================================================
# Rules
# =============================================================================
.PHONY: all test clean repl shell build analyze configure install \
test-nix-install publish plt analyze
all: build
guard-%:
@ if [ "${${*}}" == "" ]; then \
echo "Environment variable $* not set"; \
exit 1; \
fi
clean:
rm -rf _build
rm -rf .cache
repl:
$(NIX_SHELL) --run "iex -pa './_build/prod/lib/*/ebin'"
shell:
$(NIX_SHELL)
configure:
$(NIX_SHELL) --command 'eval "$$configurePhase"'
build: configure
$(NIX_SHELL) --command 'eval "$$buildPhase"'
install:
$(NIX_SHELL) --command 'eval "$$installPhase"'
test:
$(NIX_SHELL) --command 'mix test --no-start --no-deps-check'
plt:
$(NIX_SHELL) --run "mix dialyzer.plt --no-deps-check"
analyze: build plt
$(NIX_SHELL) --run "mix dialyzer --no-compile"
</programlisting>
<para>
Using a <literal>shell.nix</literal> as described (see
<xref
linkend="creating-a-shell"/>) should just work. Aside from
<literal>test</literal>, <literal>plt</literal>, and
<literal>analyze</literal>, the Make targets work just fine for all of the
build derivations.
</para>
</section>
</section>
</section>
<section xml:id="generating-packages-from-hex-with-hex2nix">
<title>Generating Packages from Hex with <literal>hex2nix</literal></title>
<para>
Updating the <link xlink:href="https://hex.pm">Hex</link> package set
requires
<link
xlink:href="https://github.com/erlang-nix/hex2nix">hex2nix</link>.
Given the path to the Erlang modules (usually
<literal>pkgs/development/erlang-modules</literal>), it will dump a file
called <literal>hex-packages.nix</literal>, containing all the packages that
use a recognized build system in
<link
xlink:href="https://hex.pm">Hex</link>. It can't be determined,
however, whether every package is buildable.
</para>
<para>
To make life easier for our users, try to build every
<link
xlink:href="https://hex.pm">Hex</link> package and remove those
that fail. To do that, simply run the following command in the root of your
<literal>nixpkgs</literal> repository:
</para>
<programlisting>
$ nix-build -A beamPackages
</programlisting>
<para>
That will attempt to build every package in <literal>beamPackages</literal>.
Then manually remove those that fail. Hopefully, someone will improve
<link
xlink:href="https://github.com/erlang-nix/hex2nix">hex2nix</link>
in the future to automate the process.
</para>
</section>
</section>


@@ -1,229 +0,0 @@
<section xmlns="http://docbook.org/ns/docbook"
xmlns:xlink="http://www.w3.org/1999/xlink"
xml:id="sec-bower">
<title>Bower</title>
<para>
<link xlink:href="http://bower.io">Bower</link> is a package manager for web
site front-end components. Bower packages (comprising build artefacts and
sometimes sources) are stored in <command>git</command> repositories,
typically on GitHub. The package registry is run by the Bower team with
package metadata coming from the <filename>bower.json</filename> file within
each package.
</para>
<para>
The end result of running Bower is a <filename>bower_components</filename>
directory which can be included in the web app's build process.
</para>
<para>
Bower can be run interactively, by installing
<varname>nodePackages.bower</varname>. More interestingly, the Bower
components can be declared in a Nix derivation, with the help of
<varname>nodePackages.bower2nix</varname>.
</para>
<section xml:id="ssec-bower2nix-usage">
<title><command>bower2nix</command> usage</title>
<para>
Suppose you have a <filename>bower.json</filename> with the following
contents:
<example xml:id="ex-bowerJson">
<title><filename>bower.json</filename></title>
<programlisting language="json">
<![CDATA[{
"name": "my-web-app",
"dependencies": {
"angular": "~1.5.0",
"bootstrap": "~3.3.6"
}
}]]>
</programlisting>
</example>
</para>
<para>
Running <command>bower2nix</command> will produce something like the
following output:
<programlisting language="nix">
<![CDATA[{ fetchbower, buildEnv }:
buildEnv { name = "bower-env"; ignoreCollisions = true; paths = [
(fetchbower "angular" "1.5.3" "~1.5.0" "1749xb0firxdra4rzadm4q9x90v6pzkbd7xmcyjk6qfza09ykk9y")
(fetchbower "bootstrap" "3.3.6" "~3.3.6" "1vvqlpbfcy0k5pncfjaiskj3y6scwifxygfqnw393sjfxiviwmbv")
(fetchbower "jquery" "2.2.2" "1.9.1 - 2" "10sp5h98sqwk90y4k6hbdviwqzvzwqf47r3r51pakch5ii2y7js1")
]; }]]>
</programlisting>
</para>
<para>
Using the <command>bower2nix</command> command line arguments, the output
can be redirected to a file. A name like
<filename>bower-packages.nix</filename> would be fine.
</para>
<para>
The resulting derivation is a union of all the downloaded Bower packages
(and their dependencies). To use it, they still need to be linked together
by Bower, which is where <varname>buildBowerComponents</varname> is useful.
</para>
</section>
<section xml:id="ssec-build-bower-components">
<title><varname>buildBowerComponents</varname> function</title>
<para>
The function is implemented in
<link xlink:href="https://github.com/NixOS/nixpkgs/blob/master/pkgs/development/bower-modules/generic/default.nix">
<filename>pkgs/development/bower-modules/generic/default.nix</filename></link>.
Example usage:
<example xml:id="ex-buildBowerComponents">
<title>buildBowerComponents</title>
<programlisting language="nix">
bowerComponents = buildBowerComponents {
name = "my-web-app";
generated = ./bower-packages.nix; <co xml:id="ex-buildBowerComponents-1" />
src = myWebApp; <co xml:id="ex-buildBowerComponents-2" />
};
</programlisting>
</example>
</para>
<para>
In <xref linkend="ex-buildBowerComponents" />, the following arguments are
of special significance to the function:
<calloutlist>
<callout arearefs="ex-buildBowerComponents-1">
<para>
<varname>generated</varname> specifies the file which was created by
<command>bower2nix</command>.
</para>
</callout>
<callout arearefs="ex-buildBowerComponents-2">
<para>
<varname>src</varname> is your project's sources. It needs to contain a
<filename>bower.json</filename> file.
</para>
</callout>
</calloutlist>
</para>
<para>
<varname>buildBowerComponents</varname> will run Bower to link together the
output of <command>bower2nix</command>, resulting in a
<filename>bower_components</filename> directory which can be used.
</para>
<para>
Here is an example of a web frontend build process using
<command>gulp</command>. You might use <command>grunt</command>, or anything
else.
</para>
<example xml:id="ex-bowerGulpFile">
<title>Example build script (<filename>gulpfile.js</filename>)</title>
<programlisting language="javascript">
<![CDATA[var gulp = require('gulp');
gulp.task('default', [], function () {
gulp.start('build');
});
gulp.task('build', [], function () {
console.log("Just a dummy gulp build");
gulp
.src(["./bower_components/**/*"])
.pipe(gulp.dest("./gulpdist/"));
});]]>
</programlisting>
</example>
<example xml:id="ex-buildBowerComponentsDefaultNix">
<title>Full example — <filename>default.nix</filename></title>
<programlisting language="nix">
{ myWebApp ? { outPath = ./.; name = "myWebApp"; }
, pkgs ? import &lt;nixpkgs&gt; {}
}:
pkgs.stdenv.mkDerivation {
name = "my-web-app-frontend";
src = myWebApp;
buildInputs = [ pkgs.nodePackages.gulp ];
bowerComponents = pkgs.buildBowerComponents { <co xml:id="ex-buildBowerComponentsDefault-1" />
name = "my-web-app";
generated = ./bower-packages.nix;
src = myWebApp;
};
buildPhase = ''
cp --reflink=auto --no-preserve=mode -R $bowerComponents/bower_components . <co xml:id="ex-buildBowerComponentsDefault-2" />
export HOME=$PWD <co xml:id="ex-buildBowerComponentsDefault-3" />
${pkgs.nodePackages.gulp}/bin/gulp build <co xml:id="ex-buildBowerComponentsDefault-4" />
'';
installPhase = "mv gulpdist $out";
}
</programlisting>
</example>
<para>
A few notes about <xref linkend="ex-buildBowerComponentsDefaultNix" />:
<calloutlist>
<callout arearefs="ex-buildBowerComponentsDefault-1">
<para>
The result of <varname>buildBowerComponents</varname> is an input to the
frontend build.
</para>
</callout>
<callout arearefs="ex-buildBowerComponentsDefault-2">
<para>
Whether to symlink or copy the <filename>bower_components</filename>
directory depends on the build tool in use. In this case a copy is used
to avoid <command>gulp</command> silliness with permissions.
</para>
</callout>
<callout arearefs="ex-buildBowerComponentsDefault-3">
<para>
<command>gulp</command> requires <varname>HOME</varname> to refer to a
writeable directory.
</para>
</callout>
<callout arearefs="ex-buildBowerComponentsDefault-4">
<para>
The actual build command. Other tools could be used.
</para>
</callout>
</calloutlist>
</para>
</section>
<section xml:id="ssec-bower2nix-troubleshooting">
<title>Troubleshooting</title>
<variablelist>
<varlistentry>
<term>
<literal>ENOCACHE</literal> errors from <varname>buildBowerComponents</varname>
</term>
<listitem>
<para>
This means that Bower was looking for a package version which doesn't
exist in the generated <filename>bower-packages.nix</filename>.
</para>
<para>
If <filename>bower.json</filename> has been updated, then run
<command>bower2nix</command> again.
</para>
<para>
It could also be a bug in <command>bower2nix</command> or
<command>fetchbower</command>. If possible, try reformulating the version
specification in <filename>bower.json</filename>.
</para>
</listitem>
</varlistentry>
</variablelist>
</section>
</section>


@@ -1,65 +1,41 @@
<section xmlns="http://docbook.org/ns/docbook"
xmlns:xlink="http://www.w3.org/1999/xlink"
xml:id="sec-language-coq">
<title>Coq</title>
<para>
Coq libraries should be installed in
<literal>$(out)/lib/coq/${coq.coq-version}/user-contrib/</literal>. Such
directories are automatically added to the <literal>$COQPATH</literal>
environment variable by the hook defined in the Coq derivation.
</para>
<para>
Some libraries require OCaml and sometimes also Camlp5 or findlib. The exact
versions that were used to build Coq are saved in the
<literal>coq.ocaml</literal> and <literal>coq.camlp5</literal> and
<literal>coq.findlib</literal> attributes.
</para>
<para>
Coq libraries may be compatible with some specific versions of Coq only. The
<literal>compatibleCoqVersions</literal> attribute is used to precisely
select those versions of Coq that are compatible with this derivation.
</para>
<para>
Here is a simple package example. It is a pure Coq library, thus it depends
on Coq. It builds on the Mathematical Components library, thus it also takes
<literal>mathcomp</literal> as <literal>buildInputs</literal>. Its
<literal>Makefile</literal> has been generated using
<literal>coq_makefile</literal> so we only have to set the
<literal>$COQLIB</literal> variable at install time.
</para>
<programlisting>
{ stdenv, fetchFromGitHub, coq, mathcomp }:
stdenv.mkDerivation rec {
name = "coq${coq.coq-version}-multinomials-${version}";
version = "1.0";
src = fetchFromGitHub {
owner = "math-comp";
repo = "multinomials";
rev = version;
sha256 = "1qmbxp1h81cy3imh627pznmng0kvv37k4hrwi2faa101s6bcx55m";
<title>Coq</title>
<para>
Coq libraries should be installed in
<literal>$(out)/lib/coq/${coq.coq-version}/user-contrib/</literal>.
Such directories are automatically added to the
<literal>$COQPATH</literal> environment variable by the hook defined
in the Coq derivation.
</para>
<para>
Some libraries require OCaml and sometimes also Camlp5. The exact
versions that were used to build Coq are saved in the
<literal>coq.ocaml</literal> and <literal>coq.camlp5</literal>
attributes.
</para>
<para>
Here is a simple package example. It is a pure Coq library, thus it
only depends on Coq. Its <literal>makefile</literal> has been
generated using <literal>coq_makefile</literal> so we only have to
set the <literal>$COQLIB</literal> variable at install time.
</para>
<programlisting>
{stdenv, fetchurl, coq}:
stdenv.mkDerivation {
src = fetchurl {
url = http://coq.inria.fr/pylons/contribs/files/Karatsuba/v8.4/Karatsuba.tar.gz;
sha256 = "0ymfpv4v49k4fm63nq6gcl1hbnnxrvjjp7yzc4973n49b853c5b1";
};
name = "coq-karatsuba";
buildInputs = [ coq ];
propagatedBuildInputs = [ mathcomp ];
installFlags = "COQLIB=$(out)/lib/coq/${coq.coq-version}/";
meta = {
description = "A Coq/SSReflect Library for Monoidal Rings and Multinomials";
inherit (src.meta) homepage;
license = stdenv.lib.licenses.cecill-b;
inherit (coq.meta) platforms;
};
passthru = {
compatibleCoqVersions = v: builtins.elem v [ "8.5" "8.6" "8.7" ];
};
}
</programlisting>
</section>


@@ -1,185 +0,0 @@
# User's Guide to Emscripten in Nixpkgs
[Emscripten](https://github.com/kripken/emscripten): An LLVM-to-JavaScript Compiler
This section of the manual covers how to use `emscripten` in nixpkgs.
Minimal requirements:
* nix
* nixpkgs
Modes of use of `emscripten`:
* **Imperative usage** (on the command line):
If you want to work with `emcc`, `emconfigure` and `emmake` as you are used to from Ubuntu and similar distributions, you can use these commands:
* `nix-env -i emscripten`
* `nix-shell -p emscripten`
* **Declarative usage**:
This mode is far more powerful since it uses `nix` for dependency management of emscripten libraries and targets, by means of the `mkDerivation` implemented by `pkgs.emscriptenStdenv` and `pkgs.buildEmscriptenPackage`. The source for the packages is in `pkgs/top-level/emscripten-packages.nix` and the abstraction behind it in `pkgs/development/em-modules/generic/default.nix`.
* build and install all packages:
* `nix-env -iA emscriptenPackages`
* dev-shell for zlib implementation hacking:
* `nix-shell -A emscriptenPackages.zlib`
## Imperative usage
A few things to note:
* `export EMCC_DEBUG=2` is nice for debugging
* `~/.emscripten`, the build-artifact cache, sometimes creates issues and needs to be removed from time to time (see the command below)
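A minimal way to clear that cache (assuming it lives at the default location) is:

    rm -rf ~/.emscripten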
## Declarative usage
Let's see two different examples from `pkgs/top-level/emscripten-packages.nix`:
* `pkgs.zlib.override`
* `pkgs.buildEmscriptenPackage`
Both are interesting concepts.
A special requirement of `pkgs.buildEmscriptenPackage` is that `doCheck = true` is the default, meaning that each emscriptenPackage requires an implemented `checkPhase`.
* Use `export EMCC_DEBUG=2` from within an emscriptenPackage's `phase` to get more detailed debug output about what is going wrong.
* The `~/.emscripten` cache requires us to set `HOME=$TMPDIR` in individual phases. This makes compilation slower but also makes it more deterministic.
### Usage 1: pkgs.zlib.override
This example uses `zlib` from nixpkgs, but instead of compiling **C** to **ELF** it compiles **C** to **JS**, since we use `pkgs.zlib.override` and change stdenv to `pkgs.emscriptenStdenv`. A few adaptations and hacks were put in place to make it work. One advantage is that when `pkgs.zlib` is updated, it will automatically update this package as well. However, this can also be a downside...
See the `zlib` example:
```nix
zlib = (pkgs.zlib.override {
  stdenv = pkgs.emscriptenStdenv;
}).overrideDerivation
  (old: rec {
    buildInputs = old.buildInputs ++ [ pkgconfig ];
    # we need to reset this setting!
    NIX_CFLAGS_COMPILE="";
    configurePhase = ''
      # FIXME: Some tests require writing at $HOME
      HOME=$TMPDIR
      runHook preConfigure
      #export EMCC_DEBUG=2
      emconfigure ./configure --prefix=$out --shared
      runHook postConfigure
    '';
    dontStrip = true;
    outputs = [ "out" ];
    buildPhase = ''
      emmake make
    '';
    installPhase = ''
      emmake make install
    '';
    checkPhase = ''
      echo "================= testing zlib using node ================="
      echo "Compiling a custom test"
      set -x
      emcc -O2 -s EMULATE_FUNCTION_POINTER_CASTS=1 test/example.c -DZ_SOLO \
        libz.so.${old.version} -I . -o example.js
      echo "Using node to execute the test"
      ${pkgs.nodejs}/bin/node ./example.js
      set +x
      if [ $? -ne 0 ]; then
        echo "test failed for some reason"
        exit 1;
      else
        echo "it seems to work! very good."
      fi
      echo "================= /testing zlib using node ================="
    '';
    postPatch = pkgs.stdenv.lib.optionalString pkgs.stdenv.isDarwin ''
      substituteInPlace configure \
        --replace '/usr/bin/libtool' 'ar' \
        --replace 'AR="libtool"' 'AR="ar"' \
        --replace 'ARFLAGS="-o"' 'ARFLAGS="-r"'
    '';
  });
```
### Usage 2: pkgs.buildEmscriptenPackage
This `xmlmirror` example features an emscriptenPackage which is defined completely within this context; no `pkgs.zlib.override` is used.
```nix
xmlmirror = pkgs.buildEmscriptenPackage rec {
  name = "xmlmirror";

  buildInputs = [ pkgconfig autoconf automake libtool gnumake libxml2 nodejs openjdk json_c ];
  nativeBuildInputs = [ pkgconfig zlib ];

  src = pkgs.fetchgit {
    url = "https://gitlab.com/odfplugfest/xmlmirror.git";
    rev = "4fd7e86f7c9526b8f4c1733e5c8b45175860a8fd";
    sha256 = "1jasdqnbdnb83wbcnyrp32f36w3xwhwp0wq8lwwmhqagxrij1r4b";
  };

  configurePhase = ''
    rm -f fastXmlLint.js*
    # a fix for ERROR:root:For asm.js, TOTAL_MEMORY must be a multiple of 16MB, was 234217728
    # https://gitlab.com/odfplugfest/xmlmirror/issues/8
    sed -e "s/TOTAL_MEMORY=234217728/TOTAL_MEMORY=268435456/g" -i Makefile.emEnv
    # https://github.com/kripken/emscripten/issues/6344
    # https://gitlab.com/odfplugfest/xmlmirror/issues/9
    sed -e "s/\$(JSONC_LDFLAGS) \$(ZLIB_LDFLAGS) \$(LIBXML20_LDFLAGS)/\$(JSONC_LDFLAGS) \$(LIBXML20_LDFLAGS) \$(ZLIB_LDFLAGS) /g" -i Makefile.emEnv
    # https://gitlab.com/odfplugfest/xmlmirror/issues/11
    sed -e "s/-o fastXmlLint.js/-s EXTRA_EXPORTED_RUNTIME_METHODS='[\"ccall\", \"cwrap\"]' -o fastXmlLint.js/g" -i Makefile.emEnv
  '';

  buildPhase = ''
    HOME=$TMPDIR
    make -f Makefile.emEnv
  '';

  outputs = [ "out" "doc" ];

  installPhase = ''
    mkdir -p $out/share
    mkdir -p $doc/share/${name}
    cp Demo* $out/share
    cp -R codemirror-5.12 $out/share
    cp fastXmlLint.js* $out/share
    cp *.xsd $out/share
    cp *.js $out/share
    cp *.xhtml $out/share
    cp *.html $out/share
    cp *.json $out/share
    cp *.rng $out/share
    cp README.md $doc/share/${name}
  '';

  checkPhase = ''
  '';
};
```
### Declarative debugging
Use `nix-shell -I nixpkgs=/some/dir/nixpkgs -A emscriptenPackages.libz` and from there you can go through the individual steps. This makes it easy to build a good unit test or to list the files of the project.
1. `nix-shell -I nixpkgs=/some/dir/nixpkgs -A emscriptenPackages.libz`
2. `cd /tmp/`
3. `unpackPhase`
4. `cd libz-1.2.3`
5. `configurePhase`
6. `buildPhase`
7. ... happy hacking...
## Summary
Using this toolchain makes it easy to leverage `nix` from NixOS, macOS or even Windows (WSL+Ubuntu+Nix). This toolchain is reproducible, behaves like the rest of the packages from nixpkgs and contains a set of working examples to learn from and adapt.
If in trouble, ask the maintainers.


@@ -1,160 +1,124 @@
<section xmlns="http://docbook.org/ns/docbook"
xmlns:xlink="http://www.w3.org/1999/xlink"
xml:id="sec-language-go">
<title>Go</title>
<para>
The function <varname>buildGoPackage</varname> builds standard Go programs.
</para>
<title>Go</title>
<example xml:id='ex-buildGoPackage'>
<title>buildGoPackage</title>
<para>The function <varname>buildGoPackage</varname> builds
standard Go packages.
</para>
<example xml:id='ex-buildGoPackage'><title>buildGoPackage</title>
<programlisting>
deis = buildGoPackage rec {
name = "deis-${version}";
version = "1.13.0";
goPackagePath = "github.com/deis/deis"; <co xml:id='ex-buildGoPackage-1' />
subPackages = [ "client" ]; <co xml:id='ex-buildGoPackage-2' />
net = buildGoPackage rec {
name = "go.net-${rev}";
goPackagePath = "golang.org/x/net"; <co xml:id='ex-buildGoPackage-1' />
subPackages = [ "ipv4" "ipv6" ]; <co xml:id='ex-buildGoPackage-2' />
rev = "e0403b4e005";
src = fetchFromGitHub {
owner = "deis";
repo = "deis";
rev = "v${version}";
sha256 = "1qv9lxqx7m18029lj8cw3k7jngvxs4iciwrypdy0gd2nnghc68sw";
inherit rev;
owner = "golang";
repo = "net";
sha256 = "1g7cjzw4g4301a3yqpbk8n1d4s97sfby2aysl275x04g0zh8jxqp";
};
goDeps = ./deps.nix; <co xml:id='ex-buildGoPackage-3' />
buildFlags = "--tags release"; <co xml:id='ex-buildGoPackage-4' />
}
goPackageAliases = [ "code.google.com/p/go.net" ]; <co xml:id='ex-buildGoPackage-3' />
propagatedBuildInputs = [ goPackages.text ]; <co xml:id='ex-buildGoPackage-4' />
buildFlags = "--tags release"; <co xml:id='ex-buildGoPackage-5' />
disabled = isGo13;<co xml:id='ex-buildGoPackage-6' />
};
</programlisting>
</example>
</example>
<para>
<xref linkend='ex-buildGoPackage'/> is an example expression using
buildGoPackage; the following arguments are of special significance to the
function:
<calloutlist>
<callout arearefs='ex-buildGoPackage-1'>
<para>
<varname>goPackagePath</varname> specifies the package's canonical Go
import path.
</para>
</callout>
<callout arearefs='ex-buildGoPackage-2'>
<para>
<varname>subPackages</varname> limits the builder from building child
packages that have not been listed. If <varname>subPackages</varname> is
not specified, all child packages will be built.
</para>
<para>
In this example only <literal>github.com/deis/deis/client</literal> will
be built.
</para>
</callout>
<callout arearefs='ex-buildGoPackage-3'>
<para>
<varname>goDeps</varname> is where the Go dependencies of a Go program are
listed as a list of package sources identified by Go import path. It could
be imported as a separate <varname>deps.nix</varname> file for
readability. The dependency data structure is described below.
</para>
</callout>
<callout arearefs='ex-buildGoPackage-4'>
<para>
<varname>buildFlags</varname> is a list of flags passed to the go build
command.
</para>
</callout>
</calloutlist>
</para>
<para><xref linkend='ex-buildGoPackage'/> is an example expression using buildGoPackage;
the following arguments are of special significance to the function:
<para>
The <varname>goDeps</varname> attribute can be imported from a separate
<varname>nix</varname> file that defines which Go libraries are needed and
should be included in <varname>GOPATH</varname> for
<varname>buildPhase</varname>.
</para>
<calloutlist>
<example xml:id='ex-goDeps'>
<title>deps.nix</title>
<programlisting>
[ <co xml:id='ex-goDeps-1' />
{
goPackagePath = "gopkg.in/yaml.v2"; <co xml:id='ex-goDeps-2' />
fetch = {
type = "git"; <co xml:id='ex-goDeps-3' />
url = "https://gopkg.in/yaml.v2";
rev = "a83829b6f1293c91addabc89d0571c246397bbf4";
sha256 = "1m4dsmk90sbi17571h6pld44zxz7jc4lrnl4f27dpd1l8g5xvjhh";
};
}
{
goPackagePath = "github.com/docopt/docopt-go";
fetch = {
type = "git";
url = "https://github.com/docopt/docopt-go";
rev = "784ddc588536785e7299f7272f39101f7faccc3f";
sha256 = "0wwz48jl9fvl1iknvn9dqr4gfy1qs03gxaikrxxp9gry6773v3sj";
};
}
]
</programlisting>
</example>
<para>
<calloutlist>
<callout arearefs='ex-goDeps-1'>
<callout arearefs='ex-buildGoPackage-1'>
<para>
<varname>goDeps</varname> is a list of Go dependencies.
<varname>goPackagePath</varname> specifies the package's canonical Go import path.
</para>
</callout>
<callout arearefs='ex-goDeps-2'>
<para>
<varname>goPackagePath</varname> specifies Go package import path.
</para>
</callout>
<callout arearefs='ex-goDeps-3'>
<para>
<varname>fetch type</varname> that needs to be used to get package source.
If <varname>git</varname> is used there should be <varname>url</varname>,
<varname>rev</varname> and <varname>sha256</varname> defined next to it.
</para>
</callout>
</calloutlist>
</para>
</callout>
<para>
To extract dependency information from a Go package in an automated way, use
<link xlink:href="https://github.com/kamilchm/go2nix">go2nix</link>. It can
produce complete derivation and <varname>goDeps</varname> file for Go
programs.
</para>
<callout arearefs='ex-buildGoPackage-2'>
<para>
<varname>subPackages</varname> limits the builder from building child packages that
have not been listed. If <varname>subPackages</varname> is not specified, all child
packages will be built.
</para>
<para>
In this example only <literal>code.google.com/p/go.net/ipv4</literal> and
<literal>code.google.com/p/go.net/ipv6</literal> will be built.
</para>
</callout>
<callout arearefs='ex-buildGoPackage-3'>
<para>
<varname>goPackageAliases</varname> is a list of alternative import paths
that are valid for this library.
Packages that depend on this library will automatically rename
import paths that match any of the aliases to <literal>goPackagePath</literal>.
</para>
<para>
In this example imports will be renamed from
<literal>code.google.com/p/go.net</literal> to
<literal>golang.org/x/net</literal> in every package that depends on the
<literal>go.net</literal> library.
</para>
</callout>
<callout arearefs='ex-buildGoPackage-4'>
<para>
<varname>propagatedBuildInputs</varname> is where the dependencies of a Go library are
listed. Only libraries should list <varname>propagatedBuildInputs</varname>. If a standalone
program is being built instead, use <varname>buildInputs</varname>. If a library's tests require
additional dependencies that are not propagated, they should be listed in <varname>buildInputs</varname>.
</para>
</callout>
<callout arearefs='ex-buildGoPackage-5'>
<para>
<varname>buildFlags</varname> is a list of flags passed to the go build command.
</para>
</callout>
<callout arearefs='ex-buildGoPackage-6'>
<para>
If <varname>disabled</varname> is <literal>true</literal>,
nix will refuse to build this package.
</para>
<para>
In this example the package will not be built for Go 1.3. <literal>isGo13</literal>
is a utility function that returns <literal>true</literal> if the Go used to build the
package has version 1.3.x.
</para>
</callout>
</calloutlist>
</para>
<para>
Reusable Go libraries may be found in the <varname>goPackages</varname> set. You can test
build a Go package as follows:
<para>
<varname>buildGoPackage</varname> produces
<xref linkend='chap-multiple-output' xrefstyle="select: title" /> where
<varname>bin</varname> includes program binaries. You can test build a Go
binary as follows:
<screen>
$ nix-build -A deis.bin
</screen>
or build all outputs with:
<screen>
$ nix-build -A deis.all
</screen>
The <varname>bin</varname> output will be installed by default with
<varname>nix-env -i</varname> or <varname>systemPackages</varname>.
</para>
$ nix-build -A goPackages.net
</screen>
</para>
<para>
You may use Go packages installed into the active Nix profiles by adding
the following to your ~/.bashrc:
<para>
You may use Go packages installed into the active Nix profiles by adding the
following to your ~/.bashrc:
<screen>
for p in $NIX_PROFILES; do
GOPATH="$p/share/go:$GOPATH"
done
</screen>
</para>
</para>
<para>To extract dependency information from a Go package in an automated way, use <link xlink:href="https://github.com/cstrahan/go2nix">go2nix</link>.</para>
</section>

File diff suppressed because it is too large


@@ -1,39 +0,0 @@
Idris packages
==============
This directory contains build rules for idris packages. In addition,
it contains several functions to build and compose those packages.
Everything is exposed to the user via the `idrisPackages` attribute.
callPackage
------------
This is like the normal nixpkgs callPackage function, specialized to
idris packages.
builtins
---------
This is a list of all of the libraries that come packaged with Idris
itself.
build-idris-package
--------------------
A function to build an idris package. Its sole argument is a set like
you might pass to `stdenv.mkDerivation`, except `build-idris-package`
sets several attributes for you. See `build-idris-package.nix` for
details.
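As a rough sketch (the package name, repository and revision below are made
up, and any attributes beyond `name` and `src` are assumptions; see
`build-idris-package.nix` for the authoritative list):

```nix
my-idris-lib = idrisPackages.build-idris-package {
  name = "my-idris-lib-0.1";        # hypothetical package
  src = fetchFromGitHub {
    owner = "example";              # hypothetical owner/repo
    repo = "my-idris-lib";
    rev = "v0.1";
    sha256 = "...";                 # fill in with the real hash
  };
};
```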
build-builtin-package
----------------------
A version of `build-idris-package` specialized to builtin libraries.
Mostly for internal use.
with-packages
-------------
Bundle idris together with a list of packages. Because idris currently
only supports a single directory in its library path, you must include
all desired libraries here, including `prelude` and `base`.
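For example, the following sketch builds an Idris that can see the builtin
`prelude` and `base` libraries (add further packages to the list as needed):

```nix
idrisPackages.with-packages (with idrisPackages; [ prelude base ])
```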


@@ -1,31 +1,45 @@
<chapter xmlns="http://docbook.org/ns/docbook"
xmlns:xi="http://www.w3.org/2001/XInclude"
xml:id="chap-language-support">
<title>Support for specific programming languages and frameworks</title>
<para>
The <link linkend="chap-stdenv">standard build environment</link> makes it
easy to build typical Autotools-based packages with very little code. Any
other kind of package can be accommodated by overriding the appropriate phases
of <literal>stdenv</literal>. However, there are specialised functions in
Nixpkgs to easily build packages for other programming languages, such as
Perl or Haskell. These are described in this chapter.
</para>
<xi:include href="beam.xml" />
<xi:include href="bower.xml" />
<xi:include href="coq.xml" />
<xi:include href="go.xml" />
<xi:include href="haskell.section.xml" />
<xi:include href="idris.section.xml" />
<xi:include href="java.xml" />
<xi:include href="lua.xml" />
<xi:include href="node.section.xml" />
<xi:include href="perl.xml" />
<xi:include href="python.section.xml" />
<xi:include href="qt.xml" />
<xi:include href="r.section.xml" />
<xi:include href="ruby.xml" />
<xi:include href="rust.section.xml" />
<xi:include href="texlive.xml" />
<xi:include href="vim.section.xml" />
<xi:include href="emscripten.section.xml" />
<title>Support for specific programming languages and frameworks</title>
<para>The <link linkend="chap-stdenv">standard build
environment</link> makes it easy to build typical Autotools-based
packages with very little code. Any other kind of package can be
accommodated by overriding the appropriate phases of
<literal>stdenv</literal>. However, there are specialised functions
in Nixpkgs to easily build packages for other programming languages,
such as Perl or Haskell. These are described in this chapter.</para>
<xi:include href="perl.xml" />
<xi:include href="python.xml" />
<xi:include href="ruby.xml" />
<xi:include href="go.xml" />
<xi:include href="java.xml" />
<xi:include href="lua.xml" />
<xi:include href="coq.xml" />
<xi:include href="idris.xml" /> <!-- generated from ../../pkgs/development/idris-modules/README.md -->
<xi:include href="r.xml" /> <!-- generated from ../../pkgs/development/r-modules/README.md -->
<xi:include href="qt.xml" />
<xi:include href="texlive.xml" />
<!--
<section><title>Haskell</title>
<para>TODO</para>
</section>
<section><title>TeX / LaTeX</title>
<para>* Special support for building TeX documents</para>
</section>
-->
</chapter>


@@ -1,10 +1,11 @@
<section xmlns="http://docbook.org/ns/docbook"
xmlns:xlink="http://www.w3.org/1999/xlink"
xml:id="sec-language-java">
<title>Java</title>
<para>
Ant-based Java packages are typically built from source as follows:
<title>Java</title>
<para>Ant-based Java packages are typically built from source as follows:
<programlisting>
stdenv.mkDerivation {
name = "...";
@@ -15,36 +16,33 @@ stdenv.mkDerivation {
buildPhase = "ant";
}
</programlisting>
Note that <varname>jdk</varname> is an alias for the OpenJDK (self-built
where available, or pre-built via Zulu). Platforms with OpenJDK not (yet) in
Nixpkgs (<literal>Aarch32</literal>, <literal>Aarch64</literal>) point to the
(unfree) <literal>oraclejdk</literal>.
</para>
<para>
JAR files that are intended to be used by other packages should be installed
in <filename>$out/share/java</filename>. JDKs have a stdenv setup hook that
add any JARs in the <filename>share/java</filename> directories of the build
inputs to the <envar>CLASSPATH</envar> environment variable. For instance, if
the package <literal>libfoo</literal> installs a JAR named
<filename>foo.jar</filename> in its <filename>share/java</filename>
directory, and another package declares the attribute
Note that <varname>jdk</varname> is an alias for the OpenJDK.</para>
<para>JAR files that are intended to be used by other packages should
be installed in <filename>$out/share/java</filename>. The OpenJDK has
a stdenv setup hook that adds any JARs in the
<filename>share/java</filename> directories of the build inputs to the
<envar>CLASSPATH</envar> environment variable. For instance, if the
package <literal>libfoo</literal> installs a JAR named
<filename>foo.jar</filename> in its <filename>share/java</filename>
directory, and another package declares the attribute
<programlisting>
buildInputs = [ jdk libfoo ];
</programlisting>
then <envar>CLASSPATH</envar> will be set to
<filename>/nix/store/...-libfoo/share/java/foo.jar</filename>.
</para>
<para>
Private JARs should be installed in a location like
<filename>$out/share/<replaceable>package-name</replaceable></filename>.
</para>
then <envar>CLASSPATH</envar> will be set to
<filename>/nix/store/...-libfoo/share/java/foo.jar</filename>.</para>
<para>Private JARs
should be installed in a location like
<filename>$out/share/<replaceable>package-name</replaceable></filename>.</para>
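<para>
  Putting these conventions together, a library's install phase might look
  roughly like the following (a sketch; <filename>foo.jar</filename> is a
  placeholder name):
<programlisting>
installPhase =
  ''
    mkdir -p $out/share/java
    cp foo.jar $out/share/java/
  '';
</programlisting>
</para>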
<para>If your Java package provides a program, you need to generate a
wrapper script to run it using the OpenJRE. You can use
<literal>makeWrapper</literal> for this:
<para>
If your Java package provides a program, you need to generate a wrapper
script to run it using the OpenJRE. You can use
<literal>makeWrapper</literal> for this:
<programlisting>
buildInputs = [ makeWrapper ];
@@ -55,30 +53,32 @@ installPhase =
--add-flags "-cp $out/share/java/foo.jar org.foo.Main"
'';
</programlisting>
Note the use of <literal>jre</literal>, which is the part of the OpenJDK
package that contains the Java Runtime Environment. By using
<literal>${jre}/bin/java</literal> instead of
<literal>${jdk}/bin/java</literal>, you prevent your package from depending
on the JDK at runtime.
</para>
<para>
Note all JDKs passthru <literal>home</literal>, so if your application
requires environment variables like <envar>JAVA_HOME</envar> being set, that
can be done in a generic fashion with the <literal>--set</literal> argument
of <literal>makeWrapper</literal>:
Note the use of <literal>jre</literal>, which is the part of the
OpenJDK package that contains the Java Runtime Environment. By using
<literal>${jre}/bin/java</literal> instead of
<literal>${jdk}/bin/java</literal>, you prevent your package from
depending on the JDK at runtime.</para>
<para>It is possible to use a different Java compiler than
<command>javac</command> from the OpenJDK. For instance, to use the
Eclipse Java Compiler:
<programlisting>
--set JAVA_HOME ${jdk.home}
buildInputs = [ jre ant ecj ];
</programlisting>
</para>
<para>
It is possible to use a different Java compiler than <command>javac</command>
from the OpenJDK. For instance, to use the GNU Java Compiler:
(Note that here you don't need the full JDK as an input, but just the
JRE.) The ECJ has a stdenv setup hook that sets some environment
variables to cause Ant to use ECJ, but this doesn't work with all Ant
files. Similarly, you can use the GNU Java Compiler:
<programlisting>
buildInputs = [ gcj ant ];
</programlisting>
Here, Ant will automatically use <command>gij</command> (the GNU Java
Runtime) instead of the OpenJRE.
</para>
Here, Ant will automatically use <command>gij</command> (the GNU Java
Runtime) instead of the OpenJRE.</para>
</section>


@@ -1,22 +1,24 @@
<section xmlns="http://docbook.org/ns/docbook"
xmlns:xlink="http://www.w3.org/1999/xlink"
xml:id="sec-language-lua">
<title>Lua</title>
<para>
Lua packages are built by the <varname>buildLuaPackage</varname> function.
This function is implemented in
<link xlink:href="https://github.com/NixOS/nixpkgs/blob/master/pkgs/development/lua-modules/generic/default.nix">
<title>Lua</title>
<para>
Lua packages are built by the <varname>buildLuaPackage</varname> function. This function is
implemented
in <link xlink:href="https://github.com/NixOS/nixpkgs/blob/master/pkgs/development/lua-modules/generic/default.nix">
<filename>pkgs/development/lua-modules/generic/default.nix</filename></link>
and works similarly to <varname>buildPerlPackage</varname>. (See
<xref linkend="sec-language-perl"/> for details.)
</para>
</para>
<para>
Lua packages are defined in
<link xlink:href="https://github.com/NixOS/nixpkgs/blob/master/pkgs/top-level/lua-packages.nix"><filename>pkgs/top-level/lua-packages.nix</filename></link>.
<para>
Lua packages are defined
in <link xlink:href="https://github.com/NixOS/nixpkgs/blob/master/pkgs/top-level/lua-packages.nix"><filename>pkgs/top-level/lua-packages.nix</filename></link>.
Most of them are simple. For example:
<programlisting>
<programlisting>
fileSystem = buildLuaPackage {
name = "filesystem-1.6.2";
src = fetchurl {
@@ -30,19 +32,20 @@ fileSystem = buildLuaPackage {
};
};
</programlisting>
</para>
</para>
<para>
<para>
More complicated packages, though, should be placed in a separate file in
<link
xlink:href="https://github.com/NixOS/nixpkgs/blob/master/pkgs/development/lua-modules"><filename>pkgs/development/lua-modules</filename></link>.
</para>
</para>
<para>
Lua packages accept an additional parameter, <varname>disabled</varname>, which defines
the condition for excluding a package from luaPackages. For example, if a package has
<varname>disabled</varname> assigned to <literal>lua.luaversion != "5.1"</literal>,
it will not be included in any luaPackages except lua51Packages, making it
only built for Lua 5.1.
</para>
<para>
Lua packages accept an additional parameter, <varname>disabled</varname>, which
defines the condition for excluding a package from luaPackages. For example, if a
package has <varname>disabled</varname> assigned to <literal>lua.luaversion
!= "5.1"</literal>, it will not be included in any luaPackages except
lua51Packages, making it only built for Lua 5.1.
</para>
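<para>
  For instance, a hypothetical module that only works with Lua 5.1 would set:
<programlisting>
disabled = lua.luaversion != "5.1";
</programlisting>
</para>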
</section>


@@ -1,51 +0,0 @@
Node.js packages
================
The `pkgs/development/node-packages` folder contains a generated collection of
[NPM packages](https://npmjs.com/) that can be installed with the Nix package
manager.
As a rule of thumb, the package set should only provide *end user* software
packages, such as command-line utilities. Libraries should only be added to the
package set if there is a non-NPM package that requires it.
When it is desired to use NPM libraries in a development project, use the
`node2nix` generator directly on the `package.json` configuration file of the
project.
The package set also provides support for multiple Node.js versions. The policy
is that a new package should be added to the collection for the latest stable LTS
release (which is currently 8.x), unless there is an explicit reason to support
a different release.
If your package uses native addons, you need to examine what kind of native
build system it uses. Here are some examples:
* `node-gyp`
* `node-gyp-builder`
* `node-pre-gyp`
After you have identified the correct system, you need to override your package
expression, adding the build system as a build input. For example, `dat`
requires `node-gyp-build`, so we override its expression in `default-v8.nix`:
```nix
dat = nodePackages.dat.override (oldAttrs: {
buildInputs = oldAttrs.buildInputs ++ [ nodePackages.node-gyp-build ];
});
```
To add a package from NPM to nixpkgs:
1. Modify `pkgs/development/node-packages/node-packages-v8.json` to add, update
or remove package entries. (Or `pkgs/development/node-packages/node-packages-v10.json`
for packages depending on Node.js 10.x)
2. Run the script: `(cd pkgs/development/node-packages && ./generate.sh)`.
3. Build your new package to test your changes:
`cd /path/to/nixpkgs && nix-build -A nodePackages.<new-or-updated-package>`.
To build against a specific Node.js version (e.g. 10.x):
`nix-build -A nodePackages_10_x.<new-or-updated-package>`
4. Add and commit all modified and generated files.
For more information about the generation process, consult the
[README.md](https://github.com/svanderburg/node2nix) file of the `node2nix`
tool.


@@ -1,27 +1,24 @@
<section xmlns="http://docbook.org/ns/docbook"
xmlns:xlink="http://www.w3.org/1999/xlink"
xml:id="sec-language-perl">
<title>Perl</title>
<para>
Nixpkgs provides a function <varname>buildPerlPackage</varname>, a generic
package builder function for any Perl package that has a standard
<varname>Makefile.PL</varname>. It's implemented in
<link
xlink:href="https://github.com/NixOS/nixpkgs/blob/master/pkgs/development/perl-modules/generic"><filename>pkgs/development/perl-modules/generic</filename></link>.
</para>
<title>Perl</title>
<para>
Perl packages from CPAN are defined in
<link
<para>Nixpkgs provides a function <varname>buildPerlPackage</varname>,
a generic package builder function for any Perl package that has a
standard <varname>Makefile.PL</varname>. It's implemented in <link
xlink:href="https://github.com/NixOS/nixpkgs/blob/master/pkgs/development/perl-modules/generic"><filename>pkgs/development/perl-modules/generic</filename></link>.</para>
<para>Perl packages from CPAN are defined in <link
xlink:href="https://github.com/NixOS/nixpkgs/blob/master/pkgs/top-level/perl-packages.nix"><filename>pkgs/top-level/perl-packages.nix</filename></link>,
rather than <filename>pkgs/all-packages.nix</filename>. Most Perl packages
are so straight-forward to build that they are defined here directly, rather
than having a separate function for each package called from
<filename>perl-packages.nix</filename>. However, more complicated packages
should be put in a separate file, typically in
<filename>pkgs/development/perl-modules</filename>. Here is an example of the
former:
rather than <filename>pkgs/all-packages.nix</filename>. Most Perl
packages are so straight-forward to build that they are defined here
directly, rather than having a separate function for each package
called from <filename>perl-packages.nix</filename>. However, more
complicated packages should be put in a separate file, typically in
<filename>pkgs/development/perl-modules</filename>. Here is an
example of the former:
<programlisting>
ClassC3 = buildPerlPackage rec {
name = "Class-C3-0.21";
@@ -31,72 +28,74 @@ ClassC3 = buildPerlPackage rec {
};
};
</programlisting>
Note the use of <literal>mirror://cpan/</literal>, and the
<literal>${name}</literal> in the URL definition to ensure that the name
attribute is consistent with the source that we're actually downloading.
Perl packages are made available in <filename>all-packages.nix</filename>
through the variable <varname>perlPackages</varname>. For instance, if you
have a package that needs <varname>ClassC3</varname>, you would typically
write
Note the use of <literal>mirror://cpan/</literal>, and the
<literal>${name}</literal> in the URL definition to ensure that the
name attribute is consistent with the source that we're actually
downloading. Perl packages are made available in
<filename>all-packages.nix</filename> through the variable
<varname>perlPackages</varname>. For instance, if you have a package
that needs <varname>ClassC3</varname>, you would typically write
<programlisting>
foo = import ../path/to/foo.nix {
inherit stdenv fetchurl ...;
inherit (perlPackages) ClassC3;
};
</programlisting>
in <filename>all-packages.nix</filename>. You can test building a Perl
package as follows:
in <filename>all-packages.nix</filename>. You can test building a
Perl package as follows:
<screen>
$ nix-build -A perlPackages.ClassC3
</screen>
<varname>buildPerlPackage</varname> adds <literal>perl-</literal> to the
start of the name attribute, so the package above is actually called
<literal>perl-Class-C3-0.21</literal>. So to install it, you can say:
<varname>buildPerlPackage</varname> adds <literal>perl-</literal> to
the start of the name attribute, so the package above is actually
called <literal>perl-Class-C3-0.21</literal>. So to install it, you
can say:
<screen>
$ nix-env -i perl-Class-C3
</screen>
(Of course you can also install using the attribute name: <literal>nix-env -i
-A perlPackages.ClassC3</literal>.)
</para>
<para>
So what does <varname>buildPerlPackage</varname> do? It does the following:
<orderedlist>
<listitem>
<para>
In the configure phase, it calls <literal>perl Makefile.PL</literal> to
generate a Makefile. You can set the variable
<varname>makeMakerFlags</varname> to pass flags to
<filename>Makefile.PL</filename>
</para>
</listitem>
<listitem>
<para>
It adds the contents of the <envar>PERL5LIB</envar> environment variable
to <literal>#! .../bin/perl</literal> line of Perl scripts as
<literal>-I<replaceable>dir</replaceable></literal> flags. This ensures
that a script can find its dependencies.
</para>
</listitem>
<listitem>
<para>
In the fixup phase, it writes the propagated build inputs
(<varname>propagatedBuildInputs</varname>) to the file
<filename>$out/nix-support/propagated-user-env-packages</filename>.
<command>nix-env</command> recursively installs all packages listed in
this file when you install a package that has it. This ensures that a Perl
package can find its dependencies.
</para>
</listitem>
</orderedlist>
</para>
(Of course you can also install using the attribute name:
<literal>nix-env -i -A perlPackages.ClassC3</literal>.)</para>
<para>So what does <varname>buildPerlPackage</varname> do? It does
the following:
<orderedlist>
<listitem><para>In the configure phase, it calls <literal>perl
Makefile.PL</literal> to generate a Makefile. You can set the
variable <varname>makeMakerFlags</varname> to pass flags to
<filename>Makefile.PL</filename></para></listitem>
<listitem><para>It adds the contents of the <envar>PERL5LIB</envar>
environment variable to <literal>#! .../bin/perl</literal> line of
Perl scripts as <literal>-I<replaceable>dir</replaceable></literal>
flags. This ensures that a script can find its
dependencies.</para></listitem>
<listitem><para>In the fixup phase, it writes the propagated build
inputs (<varname>propagatedBuildInputs</varname>) to the file
<filename>$out/nix-support/propagated-user-env-packages</filename>.
<command>nix-env</command> recursively installs all packages listed
in this file when you install a package that has it. This ensures
that a Perl package can find its dependencies.</para></listitem>
</orderedlist>
</para>
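<para>
  For example, a module whose <filename>Makefile.PL</filename> takes extra
  arguments could pass them via <varname>makeMakerFlags</varname> like this
  (a sketch with made-up flags; whether your module accepts them depends on
  its <filename>Makefile.PL</filename>):
<programlisting>
makeMakerFlags = "LIBS=-L${zlib.out}/lib INC=-I${zlib.dev}/include";
</programlisting>
</para>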
<para><varname>buildPerlPackage</varname> is built on top of
<varname>stdenv</varname>, so everything can be customised in the
usual way. For instance, the <literal>BerkeleyDB</literal> module has
a <varname>preConfigure</varname> hook to generate a configuration
file used by <filename>Makefile.PL</filename>:
<para>
<varname>buildPerlPackage</varname> is built on top of
<varname>stdenv</varname>, so everything can be customised in the usual way.
For instance, the <literal>BerkeleyDB</literal> module has a
<varname>preConfigure</varname> hook to generate a configuration file used by
<filename>Makefile.PL</filename>:
<programlisting>
{ buildPerlPackage, fetchurl, db }:
@@ -109,20 +108,23 @@ buildPerlPackage rec {
};
preConfigure = ''
echo "LIB = ${db.out}/lib" > config.in
echo "INCLUDE = ${db.dev}/include" >> config.in
echo "LIB = ${db}/lib" > config.in
echo "INCLUDE = ${db}/include" >> config.in
'';
}
</programlisting>
</para>
<para>
Dependencies on other Perl packages can be specified in the
<varname>buildInputs</varname> and <varname>propagatedBuildInputs</varname>
attributes. If something is exclusively a build-time dependency, use
<varname>buildInputs</varname>; if it's (also) a runtime dependency, use
<varname>propagatedBuildInputs</varname>. For instance, this builds a Perl
module that has runtime dependencies on a bunch of other modules:
</para>
<para>Dependencies on other Perl packages can be specified in the
<varname>buildInputs</varname> and
<varname>propagatedBuildInputs</varname> attributes. If something is
exclusively a build-time dependency, use
<varname>buildInputs</varname>; if it's (also) a runtime dependency,
use <varname>propagatedBuildInputs</varname>. For instance, this
builds a Perl module that has runtime dependencies on a bunch of other
modules:
<programlisting>
ClassC3Componentised = buildPerlPackage rec {
name = "Class-C3-Componentised-1.0004";
@@ -135,58 +137,45 @@ ClassC3Componentised = buildPerlPackage rec {
];
};
</programlisting>
</para>
<section xml:id="ssec-generation-from-CPAN">
<title>Generation from CPAN</title>
</para>
<para>
Nix expressions for Perl packages can be generated (almost) automatically
from CPAN. This is done by the program
<command>nix-generate-from-cpan</command>, which can be installed as
follows:
</para>
<section xml:id="ssec-generation-from-CPAN"><title>Generation from CPAN</title>
<para>Nix expressions for Perl packages can be generated (almost)
automatically from CPAN. This is done by the program
<command>nix-generate-from-cpan</command>, which can be installed
as follows:</para>
<screen>
$ nix-env -i nix-generate-from-cpan
</screen>
<para>
This program takes a Perl module name, looks it up on CPAN, fetches and
unpacks the corresponding package, and prints a Nix expression on standard
output. For example:
<para>This program takes a Perl module name, looks it up on CPAN,
fetches and unpacks the corresponding package, and prints a Nix
expression on standard output. For example:
<screen>
$ nix-generate-from-cpan XML::Simple
XMLSimple = buildPerlPackage rec {
name = "XML-Simple-2.22";
XMLSimple = buildPerlPackage {
name = "XML-Simple-2.20";
src = fetchurl {
url = "mirror://cpan/authors/id/G/GR/GRANTM/${name}.tar.gz";
sha256 = "b9450ef22ea9644ae5d6ada086dc4300fa105be050a2030ebd4efd28c198eb49";
url = mirror://cpan/authors/id/G/GR/GRANTM/XML-Simple-2.20.tar.gz;
sha256 = "5cff13d0802792da1eb45895ce1be461903d98ec97c9c953bc8406af7294434a";
};
propagatedBuildInputs = [ XMLNamespaceSupport XMLSAX XMLSAXExpat ];
meta = {
description = "An API for simple XML files";
license = with stdenv.lib.licenses; [ artistic1 gpl1Plus ];
description = "Easily read/write XML (esp config files)";
license = "perl";
};
};
</screen>
The output can be pasted into
<filename>pkgs/top-level/perl-packages.nix</filename> or wherever else you
need it.
</para>
</section>
<section xml:id="ssec-perl-cross-compilation">
<title>Cross-compiling modules</title>
The output can be pasted into
<filename>pkgs/top-level/perl-packages.nix</filename> or wherever else
you need it.</para>
<para>
Nixpkgs has experimental support for cross-compiling Perl modules. In many
cases, it will just work out of the box, even for modules with native
extensions. Sometimes, however, the Makefile.PL for a module may
(indirectly) import a native module. In that case, you will need to make a
stub for that module that will satisfy the Makefile.PL and install it into
<filename>lib/perl5/site_perl/cross_perl/${perl.version}</filename>. See the
<varname>postInstall</varname> for <varname>DBI</varname> for an example.
</para>
</section>
</section>
</section>

File diff suppressed because it is too large


@@ -0,0 +1,447 @@
<section xmlns="http://docbook.org/ns/docbook"
xmlns:xlink="http://www.w3.org/1999/xlink"
xml:id="sec-python">
<title>Python</title>
<para>
Currently supported interpreters are <varname>python26</varname>, <varname>python27</varname>,
<varname>python33</varname>, <varname>python34</varname>, <varname>python35</varname>
and <varname>pypy</varname>.
</para>
<para>
<varname>python</varname> is an alias to <varname>python27</varname> and <varname>python3</varname> is an alias to <varname>python34</varname>.
</para>
<para>
<varname>python26</varname> and <varname>python27</varname> do not include modules that require
external dependencies (to reduce dependency bloat). The following modules need to be added to
<varname>buildInputs</varname> explicitly:
</para>
<itemizedlist>
<listitem><para><varname>python.modules.bsddb</varname></para></listitem>
<listitem><para><varname>python.modules.curses</varname></para></listitem>
<listitem><para><varname>python.modules.curses_panel</varname></para></listitem>
<listitem><para><varname>python.modules.crypt</varname></para></listitem>
<listitem><para><varname>python.modules.gdbm</varname></para></listitem>
<listitem><para><varname>python.modules.sqlite3</varname></para></listitem>
<listitem><para><varname>python.modules.tkinter</varname></para></listitem>
<listitem><para><varname>python.modules.readline</varname></para></listitem>
</itemizedlist>
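<para>
  For example, a package that needs the <literal>sqlite3</literal> module would declare (a sketch):
<programlisting>
buildInputs = [ python.modules.sqlite3 ];
</programlisting>
</para>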
<para>For convenience <varname>python27Full</varname> and <varname>python26Full</varname>
are provided with all modules included.</para>
<para>
Python packages that
use <link xlink:href="http://pypi.python.org/pypi/setuptools/"><literal>setuptools</literal></link> or <literal>distutils</literal>,
can be built using the <varname>buildPythonPackage</varname> function as documented below.
</para>
<para>
All packages depending on any Python interpreter get appended <varname>$out/${python.sitePackages}</varname>
to <literal>$PYTHONPATH</literal> if such directory exists.
</para>
<variablelist>
<title>
Useful attributes on interpreters packages:
</title>
<varlistentry>
<term><varname>libPrefix</varname></term>
<listitem><para>
Name of the folder in <literal>${python}/lib/</literal> for corresponding interpreter.
</para></listitem>
</varlistentry>
<varlistentry>
<term><varname>interpreter</varname></term>
<listitem><para>
Alias for <literal>${python}/bin/${executable}.</literal>
</para></listitem>
</varlistentry>
<varlistentry>
<term><varname>buildEnv</varname></term>
<listitem><para>
Function to build python interpreter environments with extra packages bundled together.
See <xref linkend="ssec-python-build-env" /> for usage and documentation.
</para></listitem>
</varlistentry>
<varlistentry>
<term><varname>sitePackages</varname></term>
<listitem><para>
Alias for <literal>lib/${libPrefix}/site-packages</literal>.
</para></listitem>
</varlistentry>
<varlistentry>
<term><varname>executable</varname></term>
<listitem><para>
Name of the interpreter executable, e.g. <literal>python3.4</literal>.
</para></listitem>
</varlistentry>
</variablelist>
<section xml:id="ssec-build-python-package"><title><varname>buildPythonPackage</varname> function</title>
<para>
The function is implemented in <link xlink:href="https://github.com/NixOS/nixpkgs/blob/master/pkgs/development/python-modules/generic/default.nix">
<filename>pkgs/development/python-modules/generic/default.nix</filename></link>.
Example usage:
<programlisting language="nix">
twisted = buildPythonPackage {
name = "twisted-8.1.0";
src = pkgs.fetchurl {
url = http://tmrc.mit.edu/mirror/twisted/Twisted/8.1/Twisted-8.1.0.tar.bz2;
sha256 = "0q25zbr4xzknaghha72mq57kh53qw1bf8csgp63pm9sfi72qhirl";
};
propagatedBuildInputs = [ self.ZopeInterface ];
meta = {
homepage = http://twistedmatrix.com/;
description = "Twisted, an event-driven networking engine written in Python";
license = stdenv.lib.licenses.mit;
};
};
</programlisting>
Most Python packages that use <varname>buildPythonPackage</varname> are defined
in <link xlink:href="https://github.com/NixOS/nixpkgs/blob/master/pkgs/top-level/python-packages.nix"><filename>pkgs/top-level/python-packages.nix</filename></link>
and generated for each python interpreter separately into attribute sets <varname>python26Packages</varname>,
<varname>python27Packages</varname>, <varname>python35Packages</varname>, <varname>python33Packages</varname>,
<varname>python34Packages</varname> and <varname>pypyPackages</varname>.
</para>
<para>
<function>buildPythonPackage</function> mainly does four things:
<orderedlist>
<listitem><para>
In the <varname>buildPhase</varname>, it calls
<literal>${python.interpreter} setup.py bdist_wheel</literal> to build a wheel binary zipfile.
</para></listitem>
<listitem><para>
In the <varname>installPhase</varname>, it installs the wheel file using
<literal>pip install *.whl</literal>.
</para></listitem>
<listitem><para>
In the <varname>postFixup</varname> phase, <literal>wrapPythonPrograms</literal>
bash function is called to wrap all programs in <filename>$out/bin/*</filename>
directory to include <literal>$PYTHONPATH</literal> and <literal>$PATH</literal>
environment variables.
</para></listitem>
<listitem><para>
In the <varname>installCheck</varname> phase, <literal>${python.interpreter} setup.py test</literal>
is run.
</para></listitem>
</orderedlist>
</para>
<para>By default <varname>doCheck = true</varname> is set</para>
<para>
As in Perl, dependencies on other Python packages can be specified in the
<varname>buildInputs</varname> and
<varname>propagatedBuildInputs</varname> attributes. If something is
exclusively a build-time dependency, use
<varname>buildInputs</varname>; if it's (also) a runtime dependency,
use <varname>propagatedBuildInputs</varname>.
</para>
<para>
By default <varname>meta.platforms</varname> is set to the same value
as the interpreter unless overridden otherwise.
</para>
<variablelist>
<title>
<varname>buildPythonPackage</varname> parameters
(all parameters from <varname>mkDerivation</varname> function are still supported)
</title>
<varlistentry>
<term><varname>namePrefix</varname></term>
<listitem><para>
Prepended text to <varname>${name}</varname> parameter.
Defaults to <literal>"python3.3-"</literal> for Python 3.3, etc. Set it to
<literal>""</literal>
if you're packaging an application or a command line tool.
</para></listitem>
</varlistentry>
<varlistentry>
<term><varname>disabled</varname></term>
<listitem><para>
If <varname>true</varname>, the package is not built for the
particular Python interpreter version. Grep around
<filename>pkgs/top-level/python-packages.nix</filename>
for examples.
</para></listitem>
</varlistentry>
<varlistentry>
<term><varname>setupPyBuildFlags</varname></term>
<listitem><para>
List of flags passed to <command>setup.py build_ext</command> command.
</para></listitem>
</varlistentry>
<varlistentry>
<term><varname>pythonPath</varname></term>
<listitem><para>
List of packages to be added into <literal>$PYTHONPATH</literal>.
Packages in <varname>pythonPath</varname> are not propagated
(contrary to <varname>propagatedBuildInputs</varname>).
</para></listitem>
</varlistentry>
<varlistentry>
<term><varname>preShellHook</varname></term>
<listitem><para>
Hook to execute commands before <varname>shellHook</varname>.
</para></listitem>
</varlistentry>
<varlistentry>
<term><varname>postShellHook</varname></term>
<listitem><para>
Hook to execute commands after <varname>shellHook</varname>.
</para></listitem>
</varlistentry>
<varlistentry>
<term><varname>makeWrapperArgs</varname></term>
<listitem><para>
A list of strings. Arguments to be passed to
<varname>makeWrapper</varname>, which wraps generated binaries. By
default, the arguments to <varname>makeWrapper</varname> set
<varname>PATH</varname> and <varname>PYTHONPATH</varname> environment
variables before calling the binary. Additional arguments here can
allow a developer to set environment variables which will be
available when the binary is run. For example,
<varname>makeWrapperArgs = ["--set FOO BAR" "--set BAZ QUX"]</varname>.
</para></listitem>
</varlistentry>
</variablelist>
</section>
<section xml:id="ssec-python-build-env"><title><function>python.buildEnv</function> function</title>
<para>
Create Python environments using low-level <function>pkgs.buildEnv</function> function. Example <filename>default.nix</filename>:
<programlisting language="nix">
<![CDATA[with import <nixpkgs> {};
python.buildEnv.override {
extraLibs = [ pkgs.pythonPackages.pyramid ];
ignoreCollisions = true;
}]]>
</programlisting>
Running <command>nix-build</command> will create
<filename>/nix/store/cf1xhjwzmdki7fasgr4kz6di72ykicl5-python-2.7.8-env</filename>
with wrapped binaries in <filename>bin/</filename>.
</para>
<para>
You can also use <varname>env</varname> attribute to create local
environments with needed packages installed (somewhat comparable to
<literal>virtualenv</literal>). For example, with the following
<filename>shell.nix</filename>:
<programlisting language="nix">
<![CDATA[with import <nixpkgs> {};
(python3.buildEnv.override {
extraLibs = with python3Packages;
[ numpy
requests
];
}).env]]>
</programlisting>
Running <command>nix-shell</command> will drop you into a shell where
<command>python</command> will have specified packages in its path.
</para>
<variablelist>
<title>
<function>python.buildEnv</function> arguments
</title>
<varlistentry>
<term><varname>extraLibs</varname></term>
<listitem><para>
List of packages installed inside the environment.
</para></listitem>
</varlistentry>
<varlistentry>
<term><varname>postBuild</varname></term>
<listitem><para>
Shell command executed after the build of environment.
</para></listitem>
</varlistentry>
<varlistentry>
<term><varname>ignoreCollisions</varname></term>
<listitem><para>
Ignore file collisions inside the environment (default is <varname>false</varname>).
</para></listitem>
</varlistentry>
</variablelist>
</section>
<section xml:id="ssec-python-tools"><title>Tools</title>
<para>Packages inside nixpkgs are written by hand. However, many tools
exist in the community to help save time. No tool is preferred at the moment.
</para>
<itemizedlist>
<listitem><para>
<link xlink:href="https://github.com/proger/python2nix">python2nix</link>
by Vladimir Kirillov
</para></listitem>
<listitem><para>
<link xlink:href="https://github.com/garbas/pypi2nix">pypi2nix</link>
by Rok Garbas
</para></listitem>
<listitem><para>
<link xlink:href="https://github.com/offlinehacker/pypi2nix">pypi2nix</link>
by Jaka Hudoklin
</para></listitem>
</itemizedlist>
</section>
<section xml:id="ssec-python-development"><title>Development</title>
<para>
To develop Python packages <function>buildPythonPackage</function> has
additional logic inside <varname>shellPhase</varname> to run
<command>pip install -e . --prefix $TMPDIR/</command> for the package.
</para>
<warning><para><varname>shellPhase</varname> is executed only if <filename>setup.py</filename>
exists.</para></warning>
<para>
Given a <filename>default.nix</filename>:
<programlisting language="nix">
<![CDATA[with import <nixpkgs> {};
buildPythonPackage {
name = "myproject";
buildInputs = with pkgs.pythonPackages; [ pyramid ];
src = ./.;
}]]>
</programlisting>
Running <command>nix-shell</command> with no arguments should give you
the environment in which the package would be built with
<command>nix-build</command>.
</para>
<para>
A shortcut to set up environments with C headers/libraries and Python packages:
<programlisting language="bash">$ nix-shell -p pythonPackages.pyramid zlib libjpeg git</programlisting>
</para>
<note><para>
There is a boolean value <varname>lib.inNixShell</varname> set to
<varname>true</varname> if nix-shell is invoked.
</para></note>
</section>
<section xml:id="ssec-python-faq"><title>FAQ</title>
<variablelist>
<varlistentry>
<term>How to solve circular dependencies?</term>
<listitem><para>
If you have packages <varname>A</varname> and <varname>B</varname> that
depend on each other, when packaging <varname>B</varname> override package
<varname>A</varname> not to depend on <varname>B</varname> as input
(and also the other way around).
</para></listitem>
</varlistentry>
<varlistentry>
<term><varname>install_data / data_files</varname> problems resulting into <literal>error: could not create '/nix/store/6l1bvljpy8gazlsw2aw9skwwp4pmvyxw-python-2.7.8/etc': Permission denied</literal></term>
<listitem><para>
<link xlink:href="https://bitbucket.org/pypa/setuptools/issue/130/install_data-doesnt-respect-prefix">
Known bug in setuptools <varname>install_data</varname> does not respect --prefix</link>. Example of
such a package using the feature is <filename>pkgs/tools/X11/xpra/default.nix</filename>. As a workaround,
install it as an extra <varname>preInstall</varname> step:
<programlisting>${python.interpreter} setup.py install_data --install-dir=$out --root=$out
sed -i '/ = data_files/d' setup.py</programlisting>
</para></listitem>
</varlistentry>
<varlistentry>
<term>Rationale of non-existent global site-packages</term>
<listitem><para>
There is no need to have global site-packages in Nix. Each package has an isolated
dependency tree, and installing any Python package will only populate <varname>$PATH</varname>
inside the user environment. See <xref linkend="ssec-python-build-env" /> to create a self-contained
interpreter with a set of packages.
</para></listitem>
</varlistentry>
</variablelist>
</section>
<section xml:id="ssec-python-contrib"><title>Contributing guidelines</title>
<para>
The following rules should be respected:
</para>
<itemizedlist>
<listitem><para>
Make sure the package builds for all Python interpreters. Use the <varname>disabled</varname> argument to
<function>buildPythonPackage</function> to set unsupported interpreters.
</para></listitem>
<listitem><para>
If tests need to be disabled for a package, make sure you leave a comment about reasoning.
</para></listitem>
<listitem><para>
Packages in <link xlink:href="https://github.com/NixOS/nixpkgs/blob/master/pkgs/top-level/python-packages.nix"><filename>pkgs/top-level/python-packages.nix</filename></link>
are sorted quasi-alphabetically to avoid merge conflicts.
</para></listitem>
</itemizedlist>
</section>
</section>


@@ -1,74 +1,70 @@
<section xmlns="http://docbook.org/ns/docbook"
xmlns:xlink="http://www.w3.org/1999/xlink"
xml:id="sec-language-qt">
<title>Qt</title>
<para>
Qt is a comprehensive desktop and mobile application development toolkit for
C++. Legacy support is available for Qt 3 and Qt 4, but all current
development uses Qt 5. The Qt 5 packages in Nixpkgs are updated frequently to
take advantage of new features, but older versions are typically retained
until their support window ends. The most important consideration in
packaging Qt-based software is ensuring that each package and all its
dependencies use the same version of Qt 5; this consideration motivates most
of the tools described below.
</para>
<title>Qt</title>
<section xml:id="ssec-qt-libraries">
<title>Packaging Libraries for Nixpkgs</title>
<para>The information in this section applies to Qt 5.5 and later.</para>
<para>
Whenever possible, libraries that use Qt 5 should be built with each
available version. Packages providing libraries should be added to the
top-level function <varname>mkLibsForQt5</varname>, which is used to build a
set of libraries for every Qt 5 version. A special
<varname>callPackage</varname> function is used in this scope to ensure that
the entire dependency tree uses the same Qt 5 version. Import dependencies
unqualified, i.e., <literal>qtbase</literal> not
<literal>qt5.qtbase</literal>. <emphasis>Do not</emphasis> import a package
set such as <literal>qt5</literal> or <literal>libsForQt5</literal>.
</para>
<para>Qt is an application development toolkit for C++. Although it is
not a distinct programming language, there are special considerations
for packaging Qt-based programs and libraries. A small set of tools
and conventions has grown out of these considerations.</para>
<para>
If a library does not support a particular version of Qt 5, it is best to
mark it as broken by setting its <literal>meta.broken</literal> attribute. A
package may be marked broken for certain versions by testing the
<literal>qtbase.version</literal> attribute, which will always give the
current Qt 5 version.
</para>
</section>
<section xml:id="ssec-qt-libraries"><title>Libraries</title>
<section xml:id="ssec-qt-applications">
<title>Packaging Applications for Nixpkgs</title>
<para>Packages that provide libraries should be listed in
<varname>qt5LibsFun</varname> so that the library is built with each
Qt version. A set of packages is provided for each version of Qt; for
example, <varname>qt5Libs</varname> always provides libraries built
with the latest version, <varname>qt55Libs</varname> provides
libraries built with Qt 5.5, and so on. To avoid version conflicts, no
top-level attributes are created for these packages.</para>
<para>
Call your application expression using
<literal>libsForQt5.callPackage</literal> instead of
<literal>callPackage</literal>. Import dependencies unqualified, i.e.,
<literal>qtbase</literal> not <literal>qt5.qtbase</literal>. <emphasis>Do
not</emphasis> import a package set such as <literal>qt5</literal> or
<literal>libsForQt5</literal>.
</para>
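<para>
  In <filename>all-packages.nix</filename> this looks roughly as follows
  (a sketch with a made-up attribute name and path):
<programlisting>
myapp = libsForQt5.callPackage ../applications/misc/myapp { };
</programlisting>
</para>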
<para>
Qt 5 maintains strict backward compatibility, so it is generally best to
build an application package against the latest version using the
<varname>libsForQt5</varname> library set. In case a package does not build
with the latest Qt version, it is possible to pick a set pinned to a
particular version, e.g. <varname>libsForQt55</varname> for Qt 5.5, if that
is the latest version the package supports. If a package must be pinned to
an older Qt version, be sure to file a bug upstream; because Qt is strictly
backwards-compatible, any incompatibility is by definition a bug in the
application.
</para>
<para>
When testing applications in Nixpkgs, it is a common practice to build the
package with <literal>nix-build</literal> and run it using the created
symbolic link. This will not work with Qt applications, however, because
they have many hard runtime requirements that can only be guaranteed if the
package is actually installed. To test a Qt application, install it with
<literal>nix-env</literal> or run it inside <literal>nix-shell</literal>.
</para>
</section>
</section>
<section xml:id="ssec-qt-programs"><title>Programs</title>
<para>Application packages do not need to be built with every Qt
version. To ensure consistency between the package's dependencies,
call the package with <literal>qt5Libs.callPackage</literal> instead
of the usual <literal>callPackage</literal>. An older version may be
selected in case of incompatibility. For example, to build with Qt
5.5, call the package with
<literal>qt55Libs.callPackage</literal>.</para>
<para>Several environment variables must be set at runtime for Qt
applications to function correctly, including:</para>
<itemizedlist>
<listitem><para><envar>QT_PLUGIN_PATH</envar></para></listitem>
<listitem><para><envar>QML_IMPORT_PATH</envar></para></listitem>
<listitem><para><envar>QML2_IMPORT_PATH</envar></para></listitem>
<listitem><para><envar>XDG_DATA_DIRS</envar></para></listitem>
</itemizedlist>
<para>To ensure that these are set correctly, the program must be wrapped by
invoking <literal>wrapQtProgram <replaceable>program</replaceable></literal>
during installation (for example, during
<literal>fixupPhase</literal>). <literal>wrapQtProgram</literal>
accepts the same options as <literal>makeWrapper</literal>.
</para>
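<para>
  For example (a sketch; the program name is a placeholder):
<programlisting>
postFixup = ''
  wrapQtProgram "$out/bin/myprogram"
'';
</programlisting>
</para>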
</section>
<section xml:id="ssec-qt-kde"><title>KDE</title>
<para>Many of the considerations above also apply to KDE packages,
especially the need to set the correct environment variables at
runtime. To ensure that this is done, invoke <literal>wrapKDEProgram
<replaceable>program</replaceable></literal> during
installation. <literal>wrapKDEProgram</literal> also generates a
<literal>ksycoca</literal> database so that required data and services
can be found. Like its Qt counterpart,
<literal>wrapKDEProgram</literal> accepts the same options as
<literal>makeWrapper</literal>.</para>
</section>
</section>


@@ -1,120 +0,0 @@
R packages
==========
## Installation
Define an environment for R that contains all the libraries that you'd like to
use by adding the following snippet to your $HOME/.config/nixpkgs/config.nix file:
```nix
{
packageOverrides = super: let self = super.pkgs; in
{
rEnv = super.rWrapper.override {
packages = with self.rPackages; [
devtools
ggplot2
reshape2
yaml
optparse
];
};
};
}
```
Then you can use `nix-env -f "<nixpkgs>" -iA rEnv` to install it into your user
profile. The set of available libraries can be discovered by running the
command `nix-env -f "<nixpkgs>" -qaP -A rPackages`. The first column from that
output is the name that has to be passed to rWrapper in the code snippet above.
However, if you'd like to add a file to your project source to make the
environment available for other contributors, you can create a `default.nix`
file like so:
```nix
let
pkgs = import <nixpkgs> {};
stdenv = pkgs.stdenv;
in with pkgs; {
myProject = stdenv.mkDerivation {
name = "myProject";
version = "1";
src = if pkgs.lib.inNixShell then null else nix;
buildInputs = with rPackages; [
R
ggplot2
knitr
];
};
}
```
and then run `nix-shell .` to be dropped into a shell with those packages
available.
## RStudio
RStudio uses a standard set of packages and ignores any custom R
environments or installed packages you may have. To create a custom
environment, see `rstudioWrapper`, which functions similarly to
`rWrapper`:
```nix
{
packageOverrides = super: let self = super.pkgs; in
{
rstudioEnv = super.rstudioWrapper.override {
packages = with self.rPackages; [
dplyr
ggplot2
reshape2
];
};
};
}
```
Then like above, `nix-env -f "<nixpkgs>" -iA rstudioEnv` will install
this into your user profile.
Alternatively, you can create a self-contained `shell.nix` without the need to
modify any configuration files:
```nix
{ pkgs ? import <nixpkgs> {}
}:
pkgs.rstudioWrapper.override {
packages = with pkgs.rPackages; [ dplyr ggplot2 reshape2 ];
}
```
Executing `nix-shell` will then drop you into an environment equivalent to the
one above. If you need additional packages just add them to the list and
re-enter the shell.
## Updating the package set
```bash
nix-shell generate-shell.nix
Rscript generate-r-packages.R cran > cran-packages.nix.new
mv cran-packages.nix.new cran-packages.nix
Rscript generate-r-packages.R bioc > bioc-packages.nix.new
mv bioc-packages.nix.new bioc-packages.nix
```
`generate-r-packages.R <repo>` reads `<repo>-packages.nix`, hence the renaming.
## Testing whether the Nix expression can be evaluated
```bash
nix-build test-evaluation.nix --dry-run
```
If this exits successfully, the expression is fine. If not, you have to edit `default.nix`.

View File

@@ -1,41 +1,36 @@
<section xmlns="http://docbook.org/ns/docbook"
xmlns:xlink="http://www.w3.org/1999/xlink"
xml:id="sec-language-ruby">
<title>Ruby</title>
<para>
There currently is support to bundle applications that are packaged as Ruby
gems. The utility "bundix" allows you to write a
<filename>Gemfile</filename>, let bundler create a
<filename>Gemfile.lock</filename>, and then convert this into a nix
expression that contains all Gem dependencies automatically.
</para>
<title>Ruby</title>
<para>
For example, to package sensu, we did:
</para>
<para>There currently is support to bundle applications that are packaged as Ruby gems. The utility "bundix" allows you to write a <filename>Gemfile</filename>, let bundler create a <filename>Gemfile.lock</filename>, and then convert
this into a nix expression that contains all Gem dependencies automatically.</para>
<para>For example, to package sensu, we did:</para>
<screen>
<![CDATA[$ cd pkgs/servers/monitoring
$ mkdir sensu
$ cd sensu
$ cat > Gemfile
source 'https://rubygems.org'
gem 'sensu'
$ $(nix-build '<nixpkgs>' -A bundix --no-out-link)/bin/bundix --magic
$ bundler package --path /tmp/vendor/bundle
$ $(nix-build '<nixpkgs>' -A bundix)/bin/bundix
$ cat > default.nix
{ lib, bundlerEnv, ruby }:
bundlerEnv rec {
name = "sensu-${version}";
bundlerEnv {
name = "sensu-0.17.1";
version = (import gemset).sensu.version;
inherit ruby;
# expects Gemfile, Gemfile.lock and gemset.nix in the same directory
gemdir = ./.;
gemfile = ./Gemfile;
lockfile = ./Gemfile.lock;
gemset = ./gemset.nix;
meta = with lib; {
description = "A monitoring framework that aims to be simple, malleable, and scalable";
description = "A monitoring framework that aims to be simple, malleable,
and scalable";
homepage = http://sensuapp.org/;
license = with licenses; mit;
maintainers = with maintainers; [ theuni ];
@@ -44,62 +39,15 @@ bundlerEnv rec {
}]]>
</screen>
<para>
Please check in the <filename>Gemfile</filename>,
<filename>Gemfile.lock</filename> and the <filename>gemset.nix</filename> so
future updates can be run easily.
</para>
<para>Please check in the <filename>Gemfile</filename>, <filename>Gemfile.lock</filename> and the <filename>gemset.nix</filename> so future updates can be run easily.
</para>
<para>
For tools written in Ruby - i.e. where the desire is to install a package and
then execute e.g. <command>rake</command> at the command line, there is an
alternative builder called <literal>bundlerApp</literal>. Set up the
<filename>gemset.nix</filename> the same way, and then, for example:
</para>
<screen>
<![CDATA[{ lib, bundlerApp }:
bundlerApp {
pname = "corundum";
gemdir = ./.;
exes = [ "corundum-skel" ];
meta = with lib; {
description = "Tool and libraries for maintaining Ruby gems.";
homepage = https://github.com/nyarly/corundum;
license = licenses.mit;
maintainers = [ maintainers.nyarly ];
platforms = platforms.unix;
};
}]]>
</screen>
<para>
The chief advantage of <literal>bundlerApp</literal> over
<literal>bundlerEnv</literal> is the executables introduced in the
environment are precisely those selected in the <literal>exes</literal> list,
as opposed to <literal>bundlerEnv</literal> which adds all the executables
made available by gems in the gemset, which can mean e.g.
<command>rspec</command> or <command>rake</command> in unpredictable versions
available from various packages.
</para>
<para>
Resulting derivations for both builders also have two helpful attributes,
<literal>env</literal> and <literal>wrappedRuby</literal>. The first one
allows one to quickly drop into <command>nix-shell</command> with the
specified environment present. E.g. <command>nix-shell -A sensu.env</command>
would give you an environment with Ruby preset so it has all the libraries
necessary for <literal>sensu</literal> in its paths. The second one can be
used to make derivations from custom Ruby scripts which have
<filename>Gemfile</filename>s with their dependencies specified. It is a
derivation with <command>ruby</command> wrapped so it can find all the needed
dependencies. For example, to make a derivation <literal>my-script</literal>
for a <filename>my-script.rb</filename> (which should be placed in
<filename>bin</filename>) you should run <command>bundix</command> as
specified above and then use <literal>bundlerEnv</literal> like this:
</para>
<para>Resulting derivations also have two helpful items, <literal>env</literal> and <literal>wrapper</literal>. The first one allows one to quickly drop into
<command>nix-shell</command> with the specified environment present. E.g. <command>nix-shell -A sensu.env</command> would give you an environment with Ruby preset
so it has all the libraries necessary for <literal>sensu</literal> in its paths. The second one can be used to make derivations from custom Ruby scripts which have
<filename>Gemfile</filename>s with their dependencies specified. It is a derivation with <command>ruby</command> wrapped so it can find all the needed dependencies.
For example, to make a derivation <literal>my-script</literal> for a <filename>my-script.rb</filename> (which should be placed in <filename>bin</filename>) you should
run <command>bundix</command> as specified above and then use <literal>bundlerEnv</literal> like this:</para>
<programlisting>
<![CDATA[let env = bundlerEnv {
@@ -113,12 +61,18 @@ bundlerApp {
in stdenv.mkDerivation {
name = "my-script";
buildInputs = [ env.wrappedRuby ];
buildInputs = [ env.wrapper ];
script = ./my-script.rb;
buildCommand = ''
mkdir -p $out/bin
install -D -m755 $script $out/bin/my-script
patchShebangs $out/bin/my-script
'';
}]]>
</programlisting>
</section>

View File

@@ -1,395 +0,0 @@
---
title: Rust
author: Matthias Beyer
date: 2017-03-05
---
# User's Guide to the Rust Infrastructure
To install the rust compiler and cargo put
```
rustc
cargo
```
into the `environment.systemPackages` or bring them into
scope with `nix-shell -p rustc cargo`.
> If you are using NixOS and you want to use rust without a nix expression you
> probably want to add the following in your `configuration.nix` to build
> crates with C dependencies.
>
>     environment.systemPackages = with pkgs; [ binutils gcc gnumake openssl pkgconfig ];
For daily builds (beta and nightly) use either rustup from
nixpkgs or use the [Rust nightlies
overlay](#using-the-rust-nightlies-overlay).
## Compiling Rust applications with Cargo
Rust applications are packaged by using the `buildRustPackage` helper from `rustPlatform`:
```
rustPlatform.buildRustPackage rec {
name = "ripgrep-${version}";
version = "0.4.0";
src = fetchFromGitHub {
owner = "BurntSushi";
repo = "ripgrep";
rev = "${version}";
sha256 = "0y5d1n6hkw85jb3rblcxqas2fp82h3nghssa4xqrhqnz25l799pj";
};
cargoSha256 = "0q68qyl2h6i0qsz82z840myxlnjay8p1w5z7hfyr8fqp7wgwa9cx";
meta = with stdenv.lib; {
description = "A fast line-oriented regex search tool, similar to ag and ack";
homepage = https://github.com/BurntSushi/ripgrep;
license = licenses.unlicense;
maintainers = [ maintainers.tailhook ];
platforms = platforms.all;
};
}
```
`buildRustPackage` requires a `cargoSha256` attribute which is computed over
all crate sources of this package. Currently it is obtained by inserting a
fake checksum into the expression and building the package once. The correct
checksum can then be taken from the failed build.
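A common way to do this is to start from an obviously wrong placeholder hash
and copy the expected hash from the resulting error message; the placeholder
below is purely illustrative:
```
cargoSha256 = "0000000000000000000000000000000000000000000000000000";
```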
When the `Cargo.lock`, provided by upstream, is not in sync with the
`Cargo.toml`, it is possible to use `cargoPatches` to update it. All patches
added in `cargoPatches` will also be prepended to the patches in `patches` at
build-time.
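As a sketch, assuming a hypothetical local patch file that refreshes the lock
file:
```
cargoPatches = [
  # hypothetical patch bringing Cargo.lock back in sync with Cargo.toml
  ./0001-Update-Cargo-lock.patch
];
```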
## Compiling Rust crates using Nix instead of Cargo
### Simple operation
When run, `cargo build` produces a file called `Cargo.lock`,
containing pinned versions of all dependencies. Nixpkgs contains a
tool called `carnix` (`nix-env -iA nixos.carnix`), which can be used
to turn a `Cargo.lock` into a Nix expression.
That Nix expression calls `rustc` directly (hence bypassing Cargo),
and can be used to compile a crate and all its dependencies. Here is
an example for a minimal `hello` crate:
```
$ cargo new hello
$ cd hello
$ cargo build
Compiling hello v0.1.0 (file:///tmp/hello)
Finished dev [unoptimized + debuginfo] target(s) in 0.20 secs
$ carnix -o hello.nix --src ./. Cargo.lock --standalone
$ nix-build hello.nix -A hello_0_1_0
```
Now, the file produced by the call to `carnix`, called `hello.nix`, looks like:
```
# Generated by carnix 0.6.5: carnix -o hello.nix --src ./. Cargo.lock --standalone
{ lib, stdenv, buildRustCrate, fetchgit }:
let kernel = stdenv.buildPlatform.parsed.kernel.name;
# ... (content skipped)
in
rec {
hello = f: hello_0_1_0 { features = hello_0_1_0_features { hello_0_1_0 = f; }; };
hello_0_1_0_ = { dependencies?[], buildDependencies?[], features?[] }: buildRustCrate {
crateName = "hello";
version = "0.1.0";
authors = [ "pe@pijul.org <pe@pijul.org>" ];
src = ./.;
inherit dependencies buildDependencies features;
};
hello_0_1_0 = { features?(hello_0_1_0_features {}) }: hello_0_1_0_ {};
hello_0_1_0_features = f: updateFeatures f (rec {
hello_0_1_0.default = (f.hello_0_1_0.default or true);
}) [ ];
}
```
In particular, note that the argument given as `--src` is copied
verbatim to the source. If we look at more complicated
dependencies, for instance by adding a single line `libc="*"` to our
`Cargo.toml`, we first need to run `cargo build` to update the
`Cargo.lock`. Then, `carnix` needs to be run again, and produces the
following nix file:
```
# Generated by carnix 0.6.5: carnix -o hello.nix --src ./. Cargo.lock --standalone
{ lib, stdenv, buildRustCrate, fetchgit }:
let kernel = stdenv.buildPlatform.parsed.kernel.name;
# ... (content skipped)
in
rec {
hello = f: hello_0_1_0 { features = hello_0_1_0_features { hello_0_1_0 = f; }; };
hello_0_1_0_ = { dependencies?[], buildDependencies?[], features?[] }: buildRustCrate {
crateName = "hello";
version = "0.1.0";
authors = [ "pe@pijul.org <pe@pijul.org>" ];
src = ./.;
inherit dependencies buildDependencies features;
};
libc_0_2_36_ = { dependencies?[], buildDependencies?[], features?[] }: buildRustCrate {
crateName = "libc";
version = "0.2.36";
authors = [ "The Rust Project Developers" ];
sha256 = "01633h4yfqm0s302fm0dlba469bx8y6cs4nqc8bqrmjqxfxn515l";
inherit dependencies buildDependencies features;
};
hello_0_1_0 = { features?(hello_0_1_0_features {}) }: hello_0_1_0_ {
dependencies = mapFeatures features ([ libc_0_2_36 ]);
};
hello_0_1_0_features = f: updateFeatures f (rec {
hello_0_1_0.default = (f.hello_0_1_0.default or true);
libc_0_2_36.default = true;
}) [ libc_0_2_36_features ];
libc_0_2_36 = { features?(libc_0_2_36_features {}) }: libc_0_2_36_ {
features = mkFeatures (features.libc_0_2_36 or {});
};
libc_0_2_36_features = f: updateFeatures f (rec {
libc_0_2_36.default = (f.libc_0_2_36.default or true);
libc_0_2_36.use_std =
(f.libc_0_2_36.use_std or false) ||
(f.libc_0_2_36.default or false) ||
(libc_0_2_36.default or false);
}) [];
}
```
Here, the `libc` crate has no `src` attribute, so `buildRustCrate`
will fetch it from [crates.io](https://crates.io). A `sha256`
attribute is still needed for Nix purity.
### Handling external dependencies
Some crates require external libraries. For crates from
[crates.io](https://crates.io), such libraries can be specified in
`defaultCrateOverrides` package in nixpkgs itself.
Starting from that file, one can add more overrides, to add features
or build inputs by overriding the hello crate in a separate file.
```
with import <nixpkgs> {};
((import ./hello.nix).hello {}).override {
crateOverrides = defaultCrateOverrides // {
hello = attrs: { buildInputs = [ openssl ]; };
};
}
```
Here, `crateOverrides` is expected to be an attribute set, where the
key is the crate name without the version number and the value is a function.
The function gets all attributes passed to `buildRustCrate` as its first
argument and returns a set that contains all attributes that should be
overridden.
For more complicated cases, such as when parts of the crate's
derivation depend on the crate's version, the `attrs` argument of
the override above can be read, as in the following example, which
patches the derivation:
```
with import <nixpkgs> {};
((import ./hello.nix).hello {}).override {
crateOverrides = defaultCrateOverrides // {
hello = attrs: lib.optionalAttrs (lib.versionAtLeast attrs.version "1.0") {
postPatch = ''
substituteInPlace lib/zoneinfo.rs \
--replace "/usr/share/zoneinfo" "${tzdata}/share/zoneinfo"
'';
};
};
}
```
Another situation is when we want to override a nested
dependency. This actually works in the exact same way, since the
`crateOverrides` parameter is forwarded to the crate's
dependencies. For instance, to override the build inputs for crate
`libc` in the example above, where `libc` is a dependency of the main
crate, we could do:
```
with import <nixpkgs> {};
((import hello.nix).hello {}).override {
crateOverrides = defaultCrateOverrides // {
libc = attrs: { buildInputs = []; };
};
}
```
### Options and phases configuration
Actually, the overrides introduced in the previous section are more
general. A number of other parameters can be overridden:
- The version of rustc used to compile the crate:
```
(hello {}).override { rust = pkgs.rust; };
```
- Whether to build in release mode or debug mode (release mode by
default):
```
(hello {}).override { release = false; };
```
- Whether to print the commands sent to rustc when building
(equivalent to `--verbose` in cargo):
```
(hello {}).override { verbose = false; };
```
- Extra arguments to be passed to `rustc`:
```
(hello {}).override { extraRustcOpts = "-Z debuginfo=2"; };
```
- Phases, just like in any other derivation, can be specified using
the following attributes: `preUnpack`, `postUnpack`, `prePatch`,
`patches`, `postPatch`, `preConfigure` (in the case of a Rust crate,
this is run before calling the "build" script), `postConfigure`
(after the "build" script),`preBuild`, `postBuild`, `preInstall` and
`postInstall`. As an example, here is how to create a new module
before running the build script:
```
(hello {}).override {
preConfigure = ''
echo "pub const PATH=\"${hi.out}\";" >> src/path.rs"
'';
};
```
### Features
One can also supply features switches. For example, if we want to
compile `diesel_cli` only with the `postgres` feature, and no default
features, we would write:
```
(callPackage ./diesel.nix {}).diesel {
default = false;
postgres = true;
}
```
Where `diesel.nix` is the file generated by Carnix, as explained above.
## Setting Up `nix-shell`
Oftentimes you want to develop code from within `nix-shell`. Unfortunately
`buildRustCrate` does not support common `nix-shell` operations directly
(see [this issue](https://github.com/NixOS/nixpkgs/issues/37945))
so we will use `stdenv.mkDerivation` instead.
Using the example `hello` project above, we want to do the following:
- Have access to `cargo` and `rustc`
- Have the `openssl` library available to a crate through its _normal_
compilation mechanism (`pkg-config`).
A typical `shell.nix` might look like:
```
with import <nixpkgs> {};
stdenv.mkDerivation {
name = "rust-env";
buildInputs = [
rustc cargo
# Example Additional Dependencies
pkgconfig openssl
];
# Set Environment Variables
RUST_BACKTRACE = 1;
}
```
You should now be able to run the following:
```
$ nix-shell --pure
$ cargo build
$ cargo test
```
### Controlling Rust Version Inside `nix-shell`
To control your rust version (i.e. use nightly) from within `shell.nix` (or
other nix expressions) you can use the following `shell.nix`
```
# Latest Nightly
with import <nixpkgs> {};
let src = fetchFromGitHub {
owner = "mozilla";
repo = "nixpkgs-mozilla";
# commit from: 2018-03-27
rev = "2945b0b6b2fd19e7d23bac695afd65e320efcebe";
sha256 = "034m1dryrzh2lmjvk3c0krgip652dql46w5yfwpvh7gavd3iypyw";
};
in
with import "${src.out}/rust-overlay.nix" pkgs pkgs;
stdenv.mkDerivation {
name = "rust-env";
buildInputs = [
# Note: to use stable, just replace `nightly` with `stable`
latest.rustChannels.nightly.rust
# Add some extra dependencies from `pkgs`
pkgconfig openssl
];
# Set Environment Variables
RUST_BACKTRACE = 1;
}
```
Now run:
```
$ rustc --version
rustc 1.26.0-nightly (188e693b3 2018-03-26)
```
This confirms that you are using the nightly toolchain.
## Using the Rust nightlies overlay
Mozilla provides an overlay for nixpkgs to bring a nightly version of Rust into scope.
This overlay can _also_ be used to install recent unstable or stable versions
of Rust, if desired.
To use this overlay, clone
[nixpkgs-mozilla](https://github.com/mozilla/nixpkgs-mozilla),
and create a symbolic link to the file
[rust-overlay.nix](https://github.com/mozilla/nixpkgs-mozilla/blob/master/rust-overlay.nix)
in the `~/.config/nixpkgs/overlays` directory.
```
$ git clone https://github.com/mozilla/nixpkgs-mozilla.git
$ mkdir -p ~/.config/nixpkgs/overlays
$ ln -s $(pwd)/nixpkgs-mozilla/rust-overlay.nix ~/.config/nixpkgs/overlays/rust-overlay.nix
```
The latest version can be installed with the following command:
```
$ nix-env -Ai nixos.latest.rustChannels.stable.rust
```
Or using the attribute with nix-shell:
```
$ nix-shell -p nixos.latest.rustChannels.stable.rust
```
To install the beta or nightly channel, substitute "stable" with "beta" or
"nightly", or
use the function provided by this overlay to pull a version based on a
build date.
The overlay automatically updates itself as it uses the same source as
[rustup](https://www.rustup.rs/).

View File

@@ -1,42 +1,27 @@
<section xmlns="http://docbook.org/ns/docbook"
xmlns:xlink="http://www.w3.org/1999/xlink"
xml:id="sec-language-texlive">
<title>TeX Live</title>
<para>
Since release 15.09 there is a new TeX Live packaging that lives entirely
under attribute <varname>texlive</varname>.
</para>
<section xml:id="sec-language-texlive-users-guide">
<title>User's guide</title>
<title>TeX Live</title>
<para>Since release 15.09 there is a new TeX Live packaging that lives entirely under attribute <varname>texlive</varname>.</para>
<section><title>User's guide</title>
<itemizedlist>
<listitem>
<para>
For basic usage just pull <varname>texlive.combined.scheme-basic</varname>
for an environment with basic LaTeX support.
</para>
</listitem>
<listitem>
<para>
It typically won't work to use separately installed packages together.
Instead, you can build a custom set of packages like this:
<programlisting>
<listitem><para>
For basic usage just pull <varname>texlive.combined.scheme-basic</varname> for an environment with basic LaTeX support.</para></listitem>
<listitem><para>
It typically won't work to use separately installed packages together.
Instead, you can build a custom set of packages like this:
<programlisting>
texlive.combine {
inherit (texlive) scheme-small collection-langkorean algorithms cm-super;
}
</programlisting>
There are all the schemes, collections and a few thousand packages, as
defined upstream (perhaps with tiny differences).
</para>
</listitem>
<listitem>
<para>
By default you only get executables and files needed during runtime, and a
little documentation for the core packages. To change that, you need to
add a <varname>pkgFilter</varname> function to <varname>combine</varname>.
<programlisting>
There are all the schemes, collections and a few thousand packages, as defined upstream (perhaps with tiny differences).
</para></listitem>
<listitem><para>
By default you only get executables and files needed during runtime, and a little documentation for the core packages. To change that, you need to add a <varname>pkgFilter</varname> function to <varname>combine</varname>.
<programlisting>
texlive.combine {
# inherit (texlive) whatever-you-want;
pkgFilter = pkg:
@@ -45,55 +30,30 @@ texlive.combine {
# there are also other attributes: version, name
}
</programlisting>
</para>
</listitem>
<listitem>
<para>
You can list packages e.g. by <command>nix repl</command>.
<programlisting><![CDATA[
$ nix repl
nix-repl> :l <nixpkgs>
nix-repl> texlive.collection-<TAB>
]]></programlisting>
</para>
</listitem>
<listitem>
<para>
Note that the wrapper assumes that the result has a chance to be useful.
For example, the core executables should be present, as well as some core
data files. The supported way of ensuring this is by including some
scheme, for example <varname>scheme-basic</varname>, into the combination.
</para>
</listitem>
</para></listitem>
<listitem><para>
You can list packages e.g. by <command>nix-repl</command>.
<programlisting>
$ nix-repl
nix-repl> texlive.collection-&lt;TAB>
</programlisting>
</para></listitem>
</itemizedlist>
</section>
<section xml:id="sec-language-texlive-known-problems">
<title>Known problems</title>
<itemizedlist>
<listitem>
<para>
Some tools are still missing, e.g. luajittex;
</para>
</listitem>
<listitem>
<para>
some apps aren't packaged/tested yet (asymptote, biber, etc.);
</para>
</listitem>
<listitem>
<para>
feature/bug: when a package is rejected by <varname>pkgFilter</varname>,
its dependencies are still propagated;
</para>
</listitem>
<listitem>
<para>
in case of any bugs or feature requests, file a github issue or better a
pull request and /cc @vcunat.
</para>
</listitem>
</itemizedlist>
</section>
</section>
<section><title>Known problems</title>
<itemizedlist>
<listitem><para>
Some tools are still missing, e.g. luajittex;</para></listitem>
<listitem><para>
some apps aren't packaged/tested yet (asymptote, biber, etc.);</para></listitem>
<listitem><para>
feature/bug: when a package is rejected by <varname>pkgFilter</varname>, its dependencies are still propagated;</para></listitem>
<listitem><para>
in case of any bugs or feature requests, file a github issue or better a pull request and /cc @vcunat.</para></listitem>
</itemizedlist>
</section>
</section>

View File

@@ -1,193 +0,0 @@
---
title: User's Guide for Vim in Nixpkgs
author: Marc Weber
date: 2016-06-25
---
# User's Guide to Vim Plugins/Addons/Bundles/Scripts in Nixpkgs
Both Neovim and Vim can be configured to include your favorite plugins
and additional libraries.
Loading can be deferred; see examples.
At the moment we support three different methods for managing plugins:
- Vim packages (*recommended*)
- VAM (=vim-addon-manager)
- Pathogen
## Custom configuration
Adding custom .vimrc lines can be done using the following code:
```
vim_configurable.customize {
# `name` specifies the name of the executable and package
name = "vim-with-plugins";
vimrcConfig.customRC = ''
set hidden
'';
}
```
This configuration is used when vim is invoked with the command specified as name, in this case `vim-with-plugins`.
For Neovim the `configure` argument can be overridden to achieve the same:
```
neovim.override {
configure = {
customRC = ''
# here your custom configuration goes!
'';
};
}
```
## Managing plugins with Vim packages
To store your plugins in Vim packages, the following example can be used:
```
vim_configurable.customize {
vimrcConfig.packages.myVimPackage = with pkgs.vimPlugins; {
# loaded on launch
start = [ youcompleteme fugitive ];
# manually loadable by calling `:packadd $plugin-name`
opt = [ phpCompletion elm-vim ];
# To automatically load a plugin when opening a filetype, add vimrc lines like:
# autocmd FileType php :packadd phpCompletion
};
}
```
For Neovim the syntax is
```
neovim.override {
configure = {
customRC = ''
# here your custom configuration goes!
'';
packages.myVimPackage = with pkgs.vimPlugins; {
# see examples below how to use custom packages
start = [ ];
opt = [ ];
};
};
}
```
The resulting package can be added to `packageOverrides` in `~/.nixpkgs/config.nix` to make it installable:
```
{
packageOverrides = pkgs: with pkgs; {
myVim = vim_configurable.customize {
# `name` specifies the name of the executable and package
name = "vim-with-plugins";
# add here code from the example section
};
myNeovim = neovim.override {
configure = {
# add here code from the example section
};
};
};
}
```
After that you can install your special grafted `myVim` or `myNeovim` packages.
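Assuming a `myVim` attribute was added to `packageOverrides` as above, it can
be installed with, for example (the attribute prefix depends on your channel
name):
```
nix-env -iA nixpkgs.myVim
```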
## Managing plugins with VAM
### Handling dependencies of Vim plugins
VAM introduced .json files that support dependencies without versioning,
assuming that "using the latest version" is OK most of the time.
### Example
First create a vim-scripts file having one plugin name per line. Example:
"tlib"
{'name': 'vim-addon-sql'}
{'filetype_regex': '\%(vim)$', 'names': ['reload', 'vim-dev-plugin']}
Such a vim-scripts file can also be read by VAM like this:
```
call vam#Scripts(expand('~/.vim-scripts'), {})
```
Create a default.nix file:
```
{ nixpkgs ? import <nixpkgs> {} }:
nixpkgs.vim_configurable.customize { name = "vim"; vimrcConfig.vam.pluginDictionaries = [ "vim-addon-vim2nix" ]; }
```
Create a generate.vim file:
```
ActivateAddons vim-addon-vim2nix
let vim_scripts = "vim-scripts"
call nix#ExportPluginsForNix({
\ 'path_to_nixpkgs': eval('{"'.substitute(substitute(substitute($NIX_PATH, ':', ',', 'g'), '=',':', 'g'), '\([:,]\)', '"\1"',"g").'"}')["nixpkgs"],
\ 'cache_file': '/tmp/vim2nix-cache',
\ 'try_catch': 0,
\ 'plugin_dictionaries': ["vim-addon-manager"]+map(readfile(vim_scripts), 'eval(v:val)')
\ })
```
Then run
```
nix-shell -p vimUtils.vim_with_vim2nix --command "vim -c 'source generate.vim'"
```
You should get a Vim buffer with the nix derivations (output1) and vam.pluginDictionaries (output2).
You can add your Vim to your system's configuration file like this and start it with `vim-my`:
```
my-vim =
  let plugins = let inherit (vimUtils) buildVimPluginFrom2Nix; in {
    copy paste output1 here
  }; in vim_configurable.customize {
    name = "vim-my";
    vimrcConfig.vam.knownPlugins = plugins; # optional
    vimrcConfig.vam.pluginDictionaries = [
      copy paste output2 here
    ];
    # Pathogen would be
    # vimrcConfig.pathogen.knownPlugins = plugins; # plugins
    # vimrcConfig.pathogen.pluginNames = ["tlib"];
  };
```
Sample output1:
"reload" = buildVimPluginFrom2Nix { # created by nix#NixDerivation
name = "reload";
src = fetchgit {
url = "git://github.com/xolox/vim-reload";
rev = "0a601a668727f5b675cb1ddc19f6861f3f7ab9e1";
sha256 = "0vb832l9yxj919f5hfg6qj6bn9ni57gnjd3bj7zpq7d4iv2s4wdh";
};
dependencies = ["nim-misc"];
};
[...]
Sample output2:
```
[
  ''vim-addon-manager''
  ''tlib''
  { "name" = ''vim-addon-sql''; }
  { "filetype_regex" = ''\%(vim)$$''; "names" = [ ''reload'' ''vim-dev-plugin'' ]; }
]
```
## Important repositories
- [vim-pi](https://bitbucket.org/vimcommunity/vim-pi) is a plugin repository
  from the VAM plugin manager, meant to be used by others as well.
- [vim2nix](http://github.com/MarcWeber/vim-addon-vim2nix) generates the
  .nix code.

View File

@@ -1,85 +0,0 @@
{ pkgs ? (import ./.. { }), nixpkgs ? { }}:
let
revision = pkgs.lib.trivial.revisionWithDefault (nixpkgs.revision or "master");
libDefPos = set:
builtins.map
(name: {
name = name;
location = builtins.unsafeGetAttrPos name set;
})
(builtins.attrNames set);
libset = toplib:
builtins.map
(subsetname: {
subsetname = subsetname;
functions = libDefPos toplib."${subsetname}";
})
(builtins.filter
(name: builtins.isAttrs toplib."${name}")
(builtins.attrNames toplib));
nixpkgsLib = pkgs.lib;
flattenedLibSubset = { subsetname, functions }:
builtins.map
(fn: {
name = "lib.${subsetname}.${fn.name}";
value = fn.location;
})
functions;
locatedlibsets = libs: builtins.map flattenedLibSubset (libset libs);
removeFilenamePrefix = prefix: filename:
let
prefixLen = (builtins.stringLength prefix) + 1; # +1 to remove the leading /
filenameLen = builtins.stringLength filename;
substr = builtins.substring prefixLen filenameLen filename;
in substr;
removeNixpkgs = removeFilenamePrefix (builtins.toString pkgs.path);
liblocations =
builtins.filter
(elem: elem.value != null)
(nixpkgsLib.lists.flatten
(locatedlibsets nixpkgsLib));
fnLocationRelative = { name, value }:
{
inherit name;
value = value // { file = removeNixpkgs value.file; };
};
relativeLocs = (builtins.map fnLocationRelative liblocations);
sanitizeId = builtins.replaceStrings
[ "'" ]
[ "-prime" ];
urlPrefix = "https://github.com/NixOS/nixpkgs/blob/${revision}";
xmlstrings = (nixpkgsLib.strings.concatMapStrings
({ name, value }:
''
<section><title>${name}</title>
<para xml:id="${sanitizeId name}">
Located at
<link
xlink:href="${urlPrefix}/${value.file}#L${builtins.toString value.line}">${value.file}:${builtins.toString value.line}</link>
in <literal>&lt;nixpkgs&gt;</literal>.
</para>
</section>
'')
relativeLocs);
in pkgs.writeText
"locations.xml"
''
<section xmlns="http://docbook.org/ns/docbook"
xmlns:xlink="http://www.w3.org/1999/xlink"
version="5">
<title>All the locations for every lib function</title>
<para>This file is only for inclusion by other files.</para>
${xmlstrings}
</section>
''

View File

@@ -1,24 +1,26 @@
<book xmlns="http://docbook.org/ns/docbook"
xmlns:xi="http://www.w3.org/2001/XInclude">
<info>
<title>Nixpkgs Contributors Guide</title>
<subtitle>Version <xi:include href=".version" parse="text" />
</subtitle>
</info>
<xi:include href="introduction.chapter.xml" />
<xi:include href="quick-start.xml" />
<xi:include href="stdenv.xml" />
<xi:include href="multiple-output.xml" />
<xi:include href="cross-compilation.xml" />
<xi:include href="configuration.xml" />
<xi:include href="functions.xml" />
<xi:include href="meta.xml" />
<xi:include href="languages-frameworks/index.xml" />
<xi:include href="platform-notes.xml" />
<xi:include href="package-notes.xml" />
<xi:include href="overlays.xml" />
<xi:include href="coding-conventions.xml" />
<xi:include href="submitting-changes.xml" />
<xi:include href="reviewing-contributions.xml" />
<xi:include href="contributing.xml" />
<info>
<title>Nixpkgs Contributors Guide</title>
<subtitle>Version <xi:include href=".version" parse="text" /></subtitle>
</info>
<xi:include href="introduction.xml" />
<xi:include href="quick-start.xml" />
<xi:include href="stdenv.xml" />
<xi:include href="configuration.xml" />
<xi:include href="functions.xml" />
<xi:include href="meta.xml" />
<xi:include href="languages-frameworks/index.xml" />
<xi:include href="package-notes.xml" />
<xi:include href="coding-conventions.xml" />
<xi:include href="submitting-changes.xml" />
<xi:include href="haskell-users-guide.xml" />
<xi:include href="erlang-users-guide.xml" />
<xi:include href="contributing.xml" />
</book>

View File

@@ -1,34 +1,37 @@
<chapter xmlns="http://docbook.org/ns/docbook"
xmlns:xlink="http://www.w3.org/1999/xlink"
xml:id="chap-meta">
<title>Meta-attributes</title>
<para>
Nix packages can declare <emphasis>meta-attributes</emphasis> that contain
information about a package such as a description, its homepage, its license,
and so on. For instance, the GNU Hello package has a <varname>meta</varname>
declaration like this:
<title>Meta-attributes</title>
<para>Nix packages can declare <emphasis>meta-attributes</emphasis>
that contain information about a package such as a description, its
homepage, its license, and so on. For instance, the GNU Hello package
has a <varname>meta</varname> declaration like this:
<programlisting>
meta = with stdenv.lib; {
meta = {
description = "A program that produces a familiar, friendly greeting";
longDescription = ''
GNU Hello is a program that prints "Hello, world!" when you run it.
It is fully customizable.
'';
homepage = http://www.gnu.org/software/hello/manual/;
license = licenses.gpl3Plus;
maintainers = [ maintainers.eelco ];
platforms = platforms.all;
license = stdenv.lib.licenses.gpl3Plus;
maintainers = [ stdenv.lib.maintainers.eelco ];
platforms = stdenv.lib.platforms.all;
};
</programlisting>
</para>
<para>
Meta-attributes are not passed to the builder of the package. Thus, a change
to a meta-attribute doesnt trigger a recompilation of the package. The
value of a meta-attribute must be a string.
</para>
<para>
The meta-attributes of a package can be queried from the command-line using
<command>nix-env</command>:
</para>
<para>Meta-attributes are not passed to the builder of the package.
Thus, a change to a meta-attribute doesnt trigger a recompilation of
the package. The value of a meta-attribute must be a string.</para>
<para>The meta-attributes of a package can be queried from the
command-line using <command>nix-env</command>:
<screen>
$ nix-env -qa hello --json
{
@@ -50,7 +53,7 @@ $ nix-env -qa hello --json
"x86_64-linux",
"armv5tel-linux",
"armv7l-linux",
"mips32-linux",
"mips64el-linux",
"x86_64-darwin",
"i686-cygwin",
"i686-freebsd",
@@ -67,311 +70,254 @@ $ nix-env -qa hello --json
</screen>
<command>nix-env</command> knows about the <varname>description</varname>
field specifically:
<command>nix-env</command> knows about the
<varname>description</varname> field specifically:
<screen>
$ nix-env -qa hello --description
hello-2.3 A program that produces a familiar, friendly greeting
</screen>
</para>
<section xml:id="sec-standard-meta-attributes">
<title>Standard meta-attributes</title>
<para>
It is expected that each meta-attribute is one of the following:
</para>
</para>
<section xml:id="sec-standard-meta-attributes"><title>Standard
meta-attributes</title>
<para>It is expected that each meta-attribute is one of the following:</para>
<variablelist>
<varlistentry>
<term><varname>description</varname></term>
<listitem><para>A short (one-line) description of the package.
This is shown by <command>nix-env -q --description</command> and
also on the Nixpkgs release pages.</para>
<para>Dont include a period at the end. Dont include newline
characters. Capitalise the first character. For brevity, dont
repeat the name of the package — just describe what it does.</para>
<para>Wrong: <literal>"libpng is a library that allows you to decode PNG images."</literal></para>
<para>Right: <literal>"A library for decoding PNG images"</literal></para>
<variablelist>
<varlistentry>
<term>
<varname>description</varname>
</term>
<listitem>
<para>
A short (one-line) description of the package. This is shown by
<command>nix-env -q --description</command> and also on the Nixpkgs
release pages.
</para>
<para>
Dont include a period at the end. Dont include newline characters.
Capitalise the first character. For brevity, dont repeat the name of the
package — just describe what it does.
</para>
<para>
Wrong: <literal>"libpng is a library that allows you to decode PNG
images."</literal>
</para>
<para>
Right: <literal>"A library for decoding PNG images"</literal>
</para>
</listitem>
</varlistentry>
<varlistentry>
<term>
<varname>longDescription</varname>
</term>
</varlistentry>
<varlistentry>
<term><varname>longDescription</varname></term>
<listitem><para>An arbitrarily long description of the
package.</para></listitem>
</varlistentry>
<varlistentry>
<term><varname>branch</varname></term>
<listitem><para>Release branch. Used to specify that a package is not
going to receive updates that are not in this branch; for example, Linux
kernel 3.0 is supposed to be updated to 3.0.X, not 3.1.</para></listitem>
</varlistentry>
<varlistentry>
<term><varname>homepage</varname></term>
<listitem><para>The packages homepage. Example:
<literal>http://www.gnu.org/software/hello/manual/</literal></para></listitem>
</varlistentry>
<varlistentry>
<term><varname>downloadPage</varname></term>
<listitem><para>The page where a link to the current version can be found. Example:
<literal>http://ftp.gnu.org/gnu/hello/</literal></para></listitem>
</varlistentry>
<varlistentry>
<term><varname>license</varname></term>
<listitem>
<para>
An arbitrarily long description of the package.
</para>
</listitem>
</varlistentry>
<varlistentry>
<term>
<varname>branch</varname>
</term>
<listitem>
<para>
Release branch. Used to specify that a package is not going to receive
updates that are not in this branch; for example, Linux kernel 3.0 is
supposed to be updated to 3.0.X, not 3.1.
</para>
</listitem>
</varlistentry>
<varlistentry>
<term>
<varname>homepage</varname>
</term>
<listitem>
<para>
The packages homepage. Example:
<literal>http://www.gnu.org/software/hello/manual/</literal>
</para>
</listitem>
</varlistentry>
<varlistentry>
<term>
<varname>downloadPage</varname>
</term>
<listitem>
<para>
The page where a link to the current version can be found. Example:
<literal>http://ftp.gnu.org/gnu/hello/</literal>
</para>
</listitem>
</varlistentry>
<varlistentry>
<term>
<varname>license</varname>
</term>
<listitem>
<para>
The license, or licenses, for the package. One from the attribute set
defined in
<link
<para>
The license, or licenses, for the package. One from the attribute set
defined in <link
xlink:href="https://github.com/NixOS/nixpkgs/blob/master/lib/licenses.nix">
<filename>nixpkgs/lib/licenses.nix</filename></link>. At this moment
using both a list of licenses and a single license is valid. If the
license field is in the form of a list representation, then it means that
parts of the package are licensed differently. Each license should
preferably be referenced by their attribute. The non-list attribute value
can also be a space delimited string representation of the contained
attribute shortNames or spdxIds. The following are all valid examples:
<itemizedlist>
<listitem>
<para>
Single license referenced by attribute (preferred)
<literal>stdenv.lib.licenses.gpl3</literal>.
</para>
</listitem>
<listitem>
<para>
Single license referenced by its attribute shortName (frowned upon)
<literal>"gpl3"</literal>.
</para>
</listitem>
<listitem>
<para>
Single license referenced by its attribute spdxId (frowned upon)
<literal>"GPL-3.0"</literal>.
</para>
</listitem>
<listitem>
<para>
Multiple licenses referenced by attribute (preferred) <literal>with
stdenv.lib.licenses; [ asl20 free ofl ]</literal>.
</para>
</listitem>
<listitem>
<para>
Multiple licenses referenced as a space delimited string of attribute
shortNames (frowned upon) <literal>"asl20 free ofl"</literal>.
</para>
</listitem>
</itemizedlist>
For details, see <xref linkend='sec-meta-license'/>.
</para>
<filename>nixpkgs/lib/licenses.nix</filename></link>. At this moment
using both a list of licenses and a single license is valid. If the
license field is in the form of a list representation, then it means
that parts of the package are licensed differently. Each license
should preferably be referenced by their attribute. The non-list
attribute value can also be a space delimited string representation of
the contained attribute shortNames or spdxIds. The following are all valid
examples:
<itemizedlist>
<listitem><para>Single license referenced by attribute (preferred)
<literal>stdenv.lib.licenses.gpl3</literal>.
</para></listitem>
<listitem><para>Single license referenced by its attribute shortName (frowned upon)
<literal>"gpl3"</literal>.
</para></listitem>
<listitem><para>Single license referenced by its attribute spdxId (frowned upon)
<literal>"GPL-3.0"</literal>.
</para></listitem>
<listitem><para>Multiple licenses referenced by attribute (preferred)
<literal>with stdenv.lib.licenses; [ asl20 free ofl ]</literal>.
</para></listitem>
<listitem><para>Multiple licenses referenced as a space delimited string of attribute shortNames (frowned upon)
<literal>"asl20 free ofl"</literal>.
</para></listitem>
</itemizedlist>
For details, see <xref linkend='sec-meta-license'/>.
</para>
</listitem>
</varlistentry>
<varlistentry>
<term>
<varname>maintainers</varname>
</term>
<listitem>
<para>
A list of names and e-mail addresses of the maintainers of this Nix
expression. If you would like to be a maintainer of a package, you may
want to add yourself to
<link
xlink:href="https://github.com/NixOS/nixpkgs/blob/master/maintainers/maintainer-list.nix"><filename>nixpkgs/maintainers/maintainer-list.nix</filename></link>
and write something like <literal>[ stdenv.lib.maintainers.alice
stdenv.lib.maintainers.bob ]</literal>.
</para>
</listitem>
</varlistentry>
<varlistentry>
<term>
<varname>priority</varname>
</term>
<listitem>
<para>
The <emphasis>priority</emphasis> of the package, used by
<command>nix-env</command> to resolve file name conflicts between
packages. See the Nix manual page for <command>nix-env</command> for
details. Example: <literal>"10"</literal> (a low-priority package).
</para>
</listitem>
</varlistentry>
<varlistentry>
<term>
<varname>platforms</varname>
</term>
<listitem>
<para>
The list of Nix platform types on which the package is supported. Hydra
builds packages according to the platform specified. If no platform is
specified, the package does not have prebuilt binaries. An example is:
</varlistentry>
<varlistentry>
<term><varname>maintainers</varname></term>
<listitem><para>A list of names and e-mail addresses of the
maintainers of this Nix expression. If
you would like to be a maintainer of a package, you may want to add
yourself to <link
xlink:href="https://github.com/NixOS/nixpkgs/blob/master/lib/maintainers.nix"><filename>nixpkgs/lib/maintainers.nix</filename></link>
and write something like <literal>[ stdenv.lib.maintainers.alice
stdenv.lib.maintainers.bob ]</literal>.</para></listitem>
</varlistentry>
<varlistentry>
<term><varname>priority</varname></term>
<listitem><para>The <emphasis>priority</emphasis> of the package,
used by <command>nix-env</command> to resolve file name conflicts
between packages. See the Nix manual page for
<command>nix-env</command> for details. Example:
<literal>"10"</literal> (a low-priority
package).</para></listitem>
</varlistentry>
<varlistentry>
<term><varname>platforms</varname></term>
<listitem><para>The list of Nix platform types on which the
package is supported. Hydra builds packages according to the
platform specified. If no platform is specified, the package does
not have prebuilt binaries. An example is:
<programlisting>
meta.platforms = stdenv.lib.platforms.linux;
</programlisting>
The attribute set <varname>stdenv.lib.platforms</varname> defines
<link xlink:href="https://github.com/NixOS/nixpkgs/blob/master/lib/systems/doubles.nix">
various common lists</link> of platform types.
</para>
</listitem>
</varlistentry>
<varlistentry>
<term>
<varname>hydraPlatforms</varname>
</term>
<listitem>
<para>
The list of Nix platform types for which the Hydra instance at
<literal>hydra.nixos.org</literal> will build the package. (Hydra is the
Nix-based continuous build system.) It defaults to the value of
<varname>meta.platforms</varname>. Thus, the only reason to set
<varname>meta.hydraPlatforms</varname> is if you want
<literal>hydra.nixos.org</literal> to build the package on a subset of
<varname>meta.platforms</varname>, or not at all, e.g.
The attribute set <varname>stdenv.lib.platforms</varname> in
<link xlink:href="https://github.com/NixOS/nixpkgs/blob/master/lib/platforms.nix">
<filename>nixpkgs/lib/platforms.nix</filename></link> defines various common
lists of platform types.
</para></listitem>
</varlistentry>
<varlistentry>
<term><varname>hydraPlatforms</varname></term>
<listitem><para>The list of Nix platform types for which the Hydra
instance at <literal>hydra.nixos.org</literal> will build the
package. (Hydra is the Nix-based continuous build system.) It
defaults to the value of <varname>meta.platforms</varname>. Thus,
the only reason to set <varname>meta.hydraPlatforms</varname> is
if you want <literal>hydra.nixos.org</literal> to build the
package on a subset of <varname>meta.platforms</varname>, or not
at all, e.g.
<programlisting>
meta.platforms = stdenv.lib.platforms.linux;
meta.hydraPlatforms = [];
</programlisting>
</para>
</listitem>
</varlistentry>
<varlistentry>
<term>
<varname>broken</varname>
</term>
<listitem>
<para>
If set to <literal>true</literal>, the package is marked as “broken”,
meaning that it wont show up in <literal>nix-env -qa</literal>, and
cannot be built or installed. Such packages should be removed from
Nixpkgs eventually unless they are fixed.
</para>
</listitem>
</varlistentry>
<varlistentry>
<term>
<varname>updateWalker</varname>
</term>
<listitem>
<para>
If set to <literal>true</literal>, the package is tested to be updated
correctly by the <literal>update-walker.sh</literal> script without
additional settings. Such packages have <varname>meta.version</varname>
set and their homepage (or the page specified by
<varname>meta.downloadPage</varname>) contains a direct link to the
package tarball.
</para>
</listitem>
</varlistentry>
</variablelist>
</section>
<section xml:id="sec-meta-license">
<title>Licenses</title>
<para>
The <varname>meta.license</varname> attribute should preferably contain a
value from <varname>stdenv.lib.licenses</varname> defined in
<link xlink:href="https://github.com/NixOS/nixpkgs/blob/master/lib/licenses.nix">
<filename>nixpkgs/lib/licenses.nix</filename></link>, or in-place license
description of the same format if the license is unlikely to be useful in
another expression.
</para>
</para></listitem>
</varlistentry>
<varlistentry>
<term><varname>broken</varname></term>
<listitem><para>If set to <literal>true</literal>, the package is
marked as “broken”, meaning that it wont show up in
<literal>nix-env -qa</literal>, and cannot be built or installed.
Such packages should be removed from Nixpkgs eventually unless
they are fixed.</para></listitem>
</varlistentry>
<varlistentry>
<term><varname>updateWalker</varname></term>
<listitem><para>If set to <literal>true</literal>, the package is
tested to be updated correctly by the <literal>update-walker.sh</literal>
script without additional settings. Such packages have
<varname>meta.version</varname> set and their homepage (or
the page specified by <varname>meta.downloadPage</varname>) contains
a direct link to the package tarball.</para></listitem>
</varlistentry>
</variablelist>
</section>
<section xml:id="sec-meta-license"><title>Licenses</title>
<para>The <varname>meta.license</varname> attribute should preferably contain
a value from <varname>stdenv.lib.licenses</varname> defined in
<link xlink:href="https://github.com/NixOS/nixpkgs/blob/master/lib/licenses.nix">
<filename>nixpkgs/lib/licenses.nix</filename></link>,
or in-place license description of the same format if the license is
unlikely to be useful in another expression.</para>
<para>Although it's typically better to indicate the specific license,
a few generic options are available:
<variablelist>
<varlistentry>
<term><varname>stdenv.lib.licenses.free</varname>,
<varname>"free"</varname></term>
<listitem><para>Catch-all for free software licenses not listed
above.</para></listitem>
</varlistentry>
<varlistentry>
<term><varname>stdenv.lib.licenses.unfreeRedistributable</varname>,
<varname>"unfree-redistributable"</varname></term>
<listitem><para>Unfree package that can be redistributed in binary
form. That is, its legal to redistribute the
<emphasis>output</emphasis> of the derivation. This means that
the package can be included in the Nixpkgs
channel.</para>
<para>Sometimes proprietary software can only be redistributed
unmodified. Make sure the builder doesnt actually modify the
original binaries; otherwise were breaking the license. For
instance, the NVIDIA X11 drivers can be redistributed unmodified,
but our builder applies <command>patchelf</command> to make them
work. Thus, its license is <varname>"unfree"</varname> and it
cannot be included in the Nixpkgs channel.</para></listitem>
</varlistentry>
<varlistentry>
<term><varname>stdenv.lib.licenses.unfree</varname>,
<varname>"unfree"</varname></term>
<listitem><para>Unfree package that cannot be redistributed. You
can build it yourself, but you cannot redistribute the output of
the derivation. Thus it cannot be included in the Nixpkgs
channel.</para></listitem>
</varlistentry>
<varlistentry>
<term><varname>stdenv.lib.licenses.unfreeRedistributableFirmware</varname>,
<varname>"unfree-redistributable-firmware"</varname></term>
<listitem><para>This package supplies unfree, redistributable
firmware. This is a separate value from
<varname>unfree-redistributable</varname> because not everybody
cares whether firmware is free.</para></listitem>
</varlistentry>
</variablelist>
</para>
</section>
<para>
Although it's typically better to indicate the specific license, a few
generic options are available:
<variablelist>
<varlistentry>
<term>
<varname>stdenv.lib.licenses.free</varname>, <varname>"free"</varname>
</term>
<listitem>
<para>
Catch-all for free software licenses not listed above.
</para>
</listitem>
</varlistentry>
<varlistentry>
<term>
<varname>stdenv.lib.licenses.unfreeRedistributable</varname>, <varname>"unfree-redistributable"</varname>
</term>
<listitem>
<para>
Unfree package that can be redistributed in binary form. That is, its
legal to redistribute the <emphasis>output</emphasis> of the derivation.
This means that the package can be included in the Nixpkgs channel.
</para>
<para>
Sometimes proprietary software can only be redistributed unmodified.
Make sure the builder doesnt actually modify the original binaries;
otherwise were breaking the license. For instance, the NVIDIA X11
drivers can be redistributed unmodified, but our builder applies
<command>patchelf</command> to make them work. Thus, its license is
<varname>"unfree"</varname> and it cannot be included in the Nixpkgs
channel.
</para>
</listitem>
</varlistentry>
<varlistentry>
<term>
<varname>stdenv.lib.licenses.unfree</varname>, <varname>"unfree"</varname>
</term>
<listitem>
<para>
Unfree package that cannot be redistributed. You can build it yourself,
but you cannot redistribute the output of the derivation. Thus it cannot
be included in the Nixpkgs channel.
</para>
</listitem>
</varlistentry>
<varlistentry>
<term>
<varname>stdenv.lib.licenses.unfreeRedistributableFirmware</varname>, <varname>"unfree-redistributable-firmware"</varname>
</term>
<listitem>
<para>
This package supplies unfree, redistributable firmware. This is a
separate value from <varname>unfree-redistributable</varname> because
not everybody cares whether firmware is free.
</para>
</listitem>
</varlistentry>
</variablelist>
</para>
</section>
</chapter>

View File

@@ -1,323 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE chapter [
<!ENTITY ndash "&#x2013;"> <!-- @vcunat likes to use this one ;-) -->
]>
<chapter xmlns="http://docbook.org/ns/docbook"
xmlns:xlink="http://www.w3.org/1999/xlink"
xml:id="chap-multiple-output">
<title>Multiple-output packages</title>
<section xml:id="sec-multiple-outputs-introduction">
<title>Introduction</title>
<para>
The Nix language allows a derivation to produce multiple outputs, which is
similar to what is utilized by other Linux distribution packaging systems.
The outputs reside in separate nix store paths, so they can be mostly
handled independently of each other, including passing to build inputs,
garbage collection or binary substitution. The exception is that building
from source always produces all the outputs.
</para>
<para>
The main motivation is to save disk space by reducing runtime closure sizes;
consequently also sizes of substituted binaries get reduced. Splitting can
be used to have more granular runtime dependencies, for example the typical
reduction is to split away development-only files, as those are typically
not needed during runtime. As a result, closure sizes of many packages can
get reduced to a half or even much less.
</para>
<note>
<para>
The reduction effects could be instead achieved by building the parts in
completely separate derivations. That would often additionally reduce
build-time closures, but it tends to be much harder to write such
derivations, as build systems typically assume all parts are being built at
once. This compromise approach of single source package producing multiple
binary packages is also utilized often by rpm and deb.
</para>
</note>
</section>
<section xml:id="sec-multiple-outputs-installing">
<title>Installing a split package</title>
<para>
When installing a package via <varname>systemPackages</varname> or
<command>nix-env</command> you have several options:
</para>
<itemizedlist>
<listitem>
<para>
You can install particular outputs explicitly, as each is available in the
Nix language as an attribute of the package. The
<varname>outputs</varname> attribute contains a list of output names.
</para>
</listitem>
<listitem>
<para>
You can let it use the default outputs. These are handled by
<varname>meta.outputsToInstall</varname> attribute that contains a list of
output names.
</para>
<para>
TODO: more about tweaking the attribute, etc.
</para>
</listitem>
<listitem>
<para>
NixOS provides the configuration option
<varname>environment.extraOutputsToInstall</varname> that allows adding
extra outputs of <varname>environment.systemPackages</varname> atop the
default ones. It's mainly meant for documentation and debug symbols, and
it's also modified by specific options; see the sketch after this list.
</para>
<note>
<para>
At this moment there is no similar configurability for packages installed
by <command>nix-env</command>. You can still use approach from
<xref linkend="sec-modify-via-packageOverrides" /> to override
<varname>meta.outputsToInstall</varname> attributes, but that's a rather
inconvenient way.
</para>
</note>
</listitem>
</itemizedlist>
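<para>
As a minimal sketch, the NixOS option mentioned above could be set in
<filename>configuration.nix</filename> like this (the chosen outputs are only
an example):
<programlisting>
environment.extraOutputsToInstall = [ "doc" "info" ];
</programlisting>
</para>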
</section>
<section xml:id="sec-multiple-outputs-using-split-packages">
<title>Using a split package</title>
<para>
In the Nix language the individual outputs can be reached explicitly as
attributes, e.g. <varname>coreutils.info</varname>, but the typical case is
just using packages as build inputs.
</para>
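<para>
For instance, a derivation could reference the <varname>info</varname> output
of <literal>coreutils</literal> explicitly; the following snippet is only an
illustrative sketch:
<programlisting>
postInstall = ''
  mkdir -p $out/share
  ln -s ${coreutils.info}/share/info $out/share/info
'';
</programlisting>
</para>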
<para>
When a multiple-output derivation gets into a build input of another
derivation, the <varname>dev</varname> output is added if it exists,
otherwise the first output is added. In addition to that,
<varname>propagatedBuildOutputs</varname> of that package which by default
contain <varname>$outputBin</varname> and <varname>$outputLib</varname> are
also added. (See <xref linkend="multiple-output-file-type-groups" />.)
</para>
</section>
<section xml:id="sec-multiple-outputs-">
<title>Writing a split derivation</title>
<para>
Here you find how to write a derivation that produces multiple outputs.
</para>
<para>
In nixpkgs there is a framework supporting multiple-output derivations. It
tries to cover most cases by default behavior. You can find the source
separated in
&lt;<filename>nixpkgs/pkgs/build-support/setup-hooks/multiple-outputs.sh</filename>&gt;;
it's relatively readable. The whole machinery is triggered by defining
the <varname>outputs</varname> attribute to contain the list of desired
output names (strings).
</para>
<programlisting>outputs = [ "bin" "dev" "out" "doc" ];</programlisting>
<para>
Often such a single line is enough. For each output an equally named
environment variable is passed to the builder and contains the path in the
Nix store for that output. Typically you also want to have the main
<varname>out</varname> output, as it catches any files that didn't end up
elsewhere.
</para>
<note>
<para>
There is a special handling of the <varname>debug</varname> output,
described at <xref linkend="stdenv-separateDebugInfo" />.
</para>
</note>
<section xml:id="multiple-output-file-binaries-first-convention">
<title><quote>Binaries first</quote></title>
<para>
A commonly adopted convention in <literal>nixpkgs</literal> is that
executables provided by the package are contained within its first output.
This convention allows the dependent packages to reference the executables
provided by packages in a uniform manner. For instance, provided with the
knowledge that the <literal>perl</literal> package contains a
<literal>perl</literal> executable it can be referenced as
<literal>${pkgs.perl}/bin/perl</literal> within a Nix derivation that needs
to execute a Perl script.
</para>
<para>
The <literal>glibc</literal> package is a deliberate single exception to
the <quote>binaries first</quote> convention. The <literal>glibc</literal>
has <literal>libs</literal> as its first output allowing the libraries
provided by <literal>glibc</literal> to be referenced directly (e.g.
<literal>${stdenv.glibc}/lib/ld-linux-x86-64.so.2</literal>). The
executables provided by <literal>glibc</literal> can be accessed via its
<literal>bin</literal> attribute (e.g.
<literal>${stdenv.glibc.bin}/bin/ldd</literal>).
</para>
<para>
<literal>glibc</literal> deviates from the convention because referencing a
library it provides is a very common operation among Nix packages. For
instance, third-party
executables packaged by Nix are typically patched and relinked with the
relevant version of <literal>glibc</literal> libraries from Nix packages
(please see the documentation on
<link xlink:href="https://nixos.org/patchelf.html">patchelf</link> for more
details).
</para>
</section>
<section xml:id="multiple-output-file-type-groups">
<title>File type groups</title>
<para>
The support code currently recognizes some particular kinds of outputs and
either instructs the build system of the package to put files into their
desired outputs or moves the files during the fixup phase. Each group of
file types has an <varname>outputFoo</varname> variable specifying the
output name where they should go. If that variable isn't defined by the
derivation writer, it is guessed &ndash; a default output name is chosen,
falling back to other possibilities if that output isn't defined. A sketch of
overriding these variables follows the list below.
</para>
<variablelist>
<varlistentry>
<term>
<varname>$outputDev</varname>
</term>
<listitem>
<para>
is for development-only files. These include C(++) headers, pkg-config,
cmake and aclocal files. They go to <varname>dev</varname> or
<varname>out</varname> by default.
</para>
</listitem>
</varlistentry>
<varlistentry>
<term>
<varname>$outputBin</varname>
</term>
<listitem>
<para>
is meant for user-facing binaries, typically residing in
<filename>bin/</filename>. They go
to <varname>bin</varname> or <varname>out</varname> by default.
</para>
</listitem>
</varlistentry>
<varlistentry>
<term>
<varname>$outputLib</varname>
</term>
<listitem>
<para>
is meant for libraries, typically residing in <filename>lib/</filename>
and <filename>libexec/</filename>. They go to <varname>lib</varname> or
<varname>out</varname> by default.
</para>
</listitem>
</varlistentry>
<varlistentry>
<term>
<varname>$outputDoc</varname>
</term>
<listitem>
<para>
is for user documentation, typically residing in
<filename>share/doc/</filename>. It goes to <varname>doc</varname> or
<varname>out</varname> by default.
</para>
</listitem>
</varlistentry>
<varlistentry>
<term>
<varname>$outputDevdoc</varname>
</term>
<listitem>
<para>
is for <emphasis>developer</emphasis> documentation. Currently we count
gtk-doc and devhelp books in there. It goes to <varname>devdoc</varname>
or is removed (!) by default. This is because e.g. gtk-doc tends to be
rather large and completely unused by nixpkgs users.
</para>
</listitem>
</varlistentry>
<varlistentry>
<term>
<varname>$outputMan</varname>
</term>
<listitem>
<para>
is for man pages (except for section 3). They go to
<varname>man</varname> or <varname>$outputBin</varname> by default.
</para>
</listitem>
</varlistentry>
<varlistentry>
<term>
<varname>$outputDevman</varname>
</term>
<listitem>
<para>
is for section 3 man pages. They go to <varname>devman</varname> or
<varname>$outputMan</varname> by default.
</para>
</listitem>
</varlistentry>
<varlistentry>
<term>
<varname>$outputInfo</varname>
</term>
<listitem>
<para>
is for info pages. They go to <varname>info</varname> or
<varname>$outputBin</varname> by default.
</para>
</listitem>
</varlistentry>
</variablelist>
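<para>
A derivation can steer these groups explicitly by setting the corresponding
variables. For instance, a sketch that keeps the man pages together with the
user documentation instead of the binaries:
</para>
<programlisting>
outputs = [ "bin" "dev" "out" "doc" ];

# send man pages to the doc output instead of the default $outputBin
outputMan = "doc";
</programlisting>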
</section>
<section xml:id="sec-multiple-outputs-caveats">
<title>Common caveats</title>
<itemizedlist>
<listitem>
<para>
Some configure scripts don't like some of the parameters passed by
default by the framework, e.g. <literal>--docdir=/foo/bar</literal>. You
can disable this by setting <literal>setOutputFlags = false;</literal>.
</para>
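<para>
A hedged sketch of opting out for a package whose configure script rejects
those flags:
</para>
<programlisting>
stdenv.mkDerivation {
  name = "foo-1.0";
  outputs = [ "out" "doc" ];
  # don't pass --bindir=..., --docdir=... etc. to ./configure
  setOutputFlags = false;
  # ...
}
</programlisting>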
</listitem>
<listitem>
<para>
The outputs of a single derivation can retain references to each other,
but note that circular references are not allowed. (And each
strongly-connected component would act as a single output anyway.)
</para>
</listitem>
<listitem>
<para>
Most split packages contain their core functionality in libraries. These
libraries tend to refer to various kinds of data that typically go into
<varname>out</varname>, e.g. locale strings, so there is often no advantage
in separating the libraries into <varname>lib</varname>, as keeping them in
<varname>out</varname> is easier.
</para>
</listitem>
<listitem>
<para>
Some packages have hidden assumptions on install paths, which complicates
splitting.
</para>
</listitem>
</itemizedlist>
</section>
</section>
<!--Writing a split derivation-->
</chapter>

View File

@@ -61,10 +61,10 @@ stdenv.mkDerivation {
builder = ./builder.sh;
src = fetchurl {
url = http://ftp.nluug.nl/gnu/binutils/binutils-2.16.1.tar.bz2;
sha256 = "1ian3kwh2vg6hr3ymrv48s04gijs539vzrq62xr76bxbhbwnz2np";
md5 = "6a9d529efb285071dad10e1f3d2b2967";
};
inherit noSysDirs;
configureFlags = [ "--target=arm-linux" ];
configureFlags = "--target=arm-linux";
}
---
@@ -78,14 +78,14 @@ Step 2: build kernel headers for the target architecture
---
{stdenv, fetchurl}:
assert stdenv.buildPlatform.system == "i686-linux";
assert stdenv.system == "i686-linux";
stdenv.mkDerivation {
name = "linux-headers-2.6.13.1-arm";
name = "linux-headers-2.6.13.4-arm";
builder = ./builder.sh;
src = fetchurl {
url = http://www.kernel.org/pub/linux/kernel/v2.6/linux-2.6.13.1.tar.bz2;
sha256 = "12qxmc827fjhaz53kjy7vyrzsaqcg78amiqsb3qm20z26w705lma";
url = http://www.kernel.org/pub/linux/kernel/v2.6/linux-2.6.13.4.tar.bz2;
md5 = "94768d7eef90a9d8174639b2a7d3f58d";
};
}
---
@@ -152,7 +152,9 @@ stdenv.mkDerivation {
builder = ./builder.sh;
src = fetchurl {
url = ftp://ftp.nluug.nl/pub/gnu/gcc/gcc-4.0.2/gcc-core-4.0.2.tar.bz2;
sha256 = "02fxh0asflm8825w23l2jq1wvs7hbnam0jayrivg7zdv2ifnc0rc";
md5 = "f7781398ada62ba255486673e6274b26";
#url = ftp://ftp.nluug.nl/pub/gnu/gcc/gcc-4.0.2/gcc-4.0.2.tar.bz2;
#md5 = "a659b8388cac9db2b13e056e574ceeb0";
};
# !!! apply only if noSysDirs is set
patches = [./no-sys-dirs.patch ./gcc-inhibit.patch];

View File

@@ -0,0 +1,14 @@
Semi-automatic source information updating using "update-upstream-data.sh" script and "src-{,info-}for-*.nix"
1. Recognizing when a pre-existing package uses this mechanism.
Packages using this automatic update mechanism have src-info-for-default.nix and src-for-default.nix next to default.nix. src-info-for-default.nix describes how to get the freshest source from the upstream web site; src-for-default.nix is a generated file with the current data about the used source. Both files define a simple attrSet.
src-info-for-default.nix (for a file grabbed via http) contains at least the downloadPage attribute - it is the page we need to look at to find out the latest version. It also contains baseName, which is used for automatic generation of the package name containing the version. It can contain extra data for trickier cases.
src-for-default.nix will contain advertisedUrl (raw URL chosen on the site; its change prompts regeneration of the source data), url for fetchurl, hash, version retrieved from the download URL and a suggested package name.
2. Updating a package
The nixpkgs/pkgs/build-support/upstream-updater directory contains some scripts. The worker script is called update-upstream-data.sh. This script requires the main expression name (e.g. default.nix). It can optionally accept a second parameter, a URL which will be used instead of obtaining one by parsing the downloadPage (version extraction, mirror URL creation etc. will still be run). After running the script, check src-for-default.nix (or replace default.nix with the expression name, if there are several expressions in the directory) for new version information.
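For illustration, a minimal src-info-for-default.nix could look like the following (only the attribute names mentioned above are prescribed; the values are placeholders):
{
  baseName = "foo";
  downloadPage = "https://example.org/foo/download.html";
}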

View File

@@ -1,164 +0,0 @@
<chapter xmlns="http://docbook.org/ns/docbook"
xmlns:xlink="http://www.w3.org/1999/xlink"
xml:id="chap-overlays">
<title>Overlays</title>
<para>
This chapter describes how to extend and change Nixpkgs packages using
overlays. Overlays are used to add layers in the fix-point used by Nixpkgs to
compose the set of all packages.
</para>
<para>
Nixpkgs can be configured with a list of overlays, which are applied in
order. This means that the order of the overlays can be significant if
multiple layers override the same package.
</para>
<!--============================================================-->
<section xml:id="sec-overlays-install">
<title>Installing overlays</title>
<para>
The list of overlays is determined as follows.
</para>
<para>
If the <varname>overlays</varname> argument is not provided explicitly, we
look for overlays in a path. The path is determined as follows:
<orderedlist>
<listitem>
<para>
First, if an <varname>overlays</varname> argument to the nixpkgs function
itself is given, then that is used.
</para>
<para>
This can be passed explicitly when importing nixpkgs, for example
<literal>import &lt;nixpkgs> { overlays = [ overlay1 overlay2 ];
}</literal>.
</para>
</listitem>
<listitem>
<para>
Otherwise, if the Nix path entry <literal>&lt;nixpkgs-overlays></literal>
exists, we look for overlays at that path, as described below.
</para>
<para>
See the section on <literal>NIX_PATH</literal> in the Nix manual for more
details on how to set a value for
<literal>&lt;nixpkgs-overlays>.</literal>
</para>
</listitem>
<listitem>
<para>
If one of <filename>~/.config/nixpkgs/overlays.nix</filename> and
<filename>~/.config/nixpkgs/overlays/</filename> exists, then we look for
overlays at that path, as described below. It is an error if both exist.
</para>
</listitem>
</orderedlist>
</para>
<para>
If we are looking for overlays at a path, then there are two cases:
<itemizedlist>
<listitem>
<para>
If the path is a file, then the file is imported as a Nix expression and
used as the list of overlays.
</para>
</listitem>
<listitem>
<para>
If the path is a directory, then we take the content of the directory,
order it lexicographically, and attempt to interpret each as an overlay
by:
<itemizedlist>
<listitem>
<para>
Importing the file, if it is a <literal>.nix</literal> file.
</para>
</listitem>
<listitem>
<para>
Importing a top-level <filename>default.nix</filename> file, if it is
a directory.
</para>
</listitem>
</itemizedlist>
</para>
</listitem>
</itemizedlist>
</para>
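<para>
For illustration (the override shown is just a placeholder), a minimal
<filename>~/.config/nixpkgs/overlays.nix</filename> is simply a list of
overlay functions:
</para>
<programlisting>
[
  (self: super: {
    hello = super.hello.overrideAttrs (old: { doCheck = false; });
  })
]
</programlisting>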
<para>
On a NixOS system the value of the <literal>nixpkgs.overlays</literal>
option, if present, is passed to the system Nixpkgs directly as an argument.
Note that this does not affect the overlays for non-NixOS operations (e.g.
<literal>nix-env</literal>), which are looked up independently.
</para>
<para>
The <filename>overlays.nix</filename> file therefore provides a convenient
way to use the same overlays for a NixOS system configuration and user
configuration: the same file can be used as
<filename>overlays.nix</filename> and imported as the value of
<literal>nixpkgs.overlays</literal>.
</para>
</section>
<!--============================================================-->
<section xml:id="sec-overlays-definition">
<title>Defining overlays</title>
<para>
Overlays are Nix functions which accept two arguments, conventionally called
<varname>self</varname> and <varname>super</varname>, and return a set of
packages. For example, the following is a valid overlay.
</para>
<programlisting>
self: super:
{
boost = super.boost.override {
python = self.python3;
};
rr = super.callPackage ./pkgs/rr {
stdenv = self.stdenv_32bit;
};
}
</programlisting>
<para>
The first argument (<varname>self</varname>) corresponds to the final
package set. You should use this set for the dependencies of all packages
specified in your overlay. For example, all the dependencies of
<varname>rr</varname> in the example above come from
<varname>self</varname>, as well as the overridden dependencies used in the
<varname>boost</varname> override.
</para>
<para>
The second argument (<varname>super</varname>) corresponds to the result of
the evaluation of the previous stages of Nixpkgs. It does not contain any of
the packages added by the current overlay, nor any of the following
overlays. This set should be used either to refer to packages you wish to
override, or to access functions defined in Nixpkgs. For example, the
original recipe of <varname>boost</varname> in the above example, comes from
<varname>super</varname>, as well as the <varname>callPackage</varname>
function.
</para>
<para>
The value returned by this function should be a set similar to
<filename>pkgs/top-level/all-packages.nix</filename>, containing overridden
and/or new packages.
</para>
<para>
Overlays are similar to other methods for customizing Nixpkgs, in particular
the <literal>packageOverrides</literal> attribute described in
<xref linkend="sec-modify-via-packageOverrides"/>. Indeed,
<literal>packageOverrides</literal> acts as an overlay with only the
<varname>super</varname> argument. It is therefore appropriate for basic
use, but overlays are more powerful and easier to distribute.
</para>
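<para>
For comparison, a sketch of the same <varname>boost</varname> override
expressed via <literal>packageOverrides</literal> (here the single argument
plays the role of <varname>super</varname>):
</para>
<programlisting>
{
  packageOverrides = pkgs: {
    boost = pkgs.boost.override { python = pkgs.python3; };
  };
}
</programlisting>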
</section>
</chapter>

View File

@@ -1,9 +0,0 @@
.docbook .xref img[src^=images\/callouts\/],
.screen img,
.programlisting img {
width: 1em;
}
.calloutlist img {
width: 1.5em;
}

File diff suppressed because it is too large Load Diff

View File

@@ -1,105 +0,0 @@
<chapter xmlns="http://docbook.org/ns/docbook"
xmlns:xlink="http://www.w3.org/1999/xlink"
xml:id="chap-platform-nodes">
<title>Platform Notes</title>
<section xml:id="sec-darwin">
<title>Darwin (macOS)</title>
<para>
Some common issues when packaging software for darwin:
</para>
<itemizedlist>
<listitem>
<para>
The darwin <literal>stdenv</literal> uses clang instead of gcc. When
referring to the compiler, <varname>$CC</varname> or <command>cc</command>
will work in both cases. Some builds hardcode gcc/g++ in their build
scripts; that can usually be fixed by using something like
<literal>makeFlags = [ "CC=cc" ];</literal> or by patching the build
scripts.
</para>
<programlisting>
stdenv.mkDerivation {
name = "libfoo-1.2.3";
# ...
buildPhase = ''
$CC -o hello hello.c
'';
}
</programlisting>
</listitem>
<listitem>
<para>
On darwin, libraries are linked using absolute paths; they are
resolved by their <literal>install_name</literal> at link time. Sometimes
packages won't set this correctly, causing the library lookups to fail at
runtime. This can be fixed by adding extra linker flags or by running
<command>install_name_tool -id</command> during the
<function>fixupPhase</function>.
</para>
<programlisting>
stdenv.mkDerivation {
name = "libfoo-1.2.3";
# ...
makeFlags = stdenv.lib.optional stdenv.isDarwin "LDFLAGS=-Wl,-install_name,$(out)/lib/libfoo.dylib";
}
</programlisting>
</listitem>
<listitem>
<para>
Even if the libraries are linked using absolute paths and resolved via
their <literal>install_name</literal> correctly, tests can sometimes fail
to run binaries. This happens because the <varname>checkPhase</varname>
runs before the libraries are installed.
</para>
<para>
This can usually be solved by running the tests after the
<varname>installPhase</varname> or alternatively by using
<varname>DYLD_LIBRARY_PATH</varname>. More information about this variable
can be found in the <citerefentry>
<refentrytitle>dyld</refentrytitle>
<manvolnum>1</manvolnum></citerefentry> manpage.
</para>
<programlisting>
dyld: Library not loaded: /nix/store/7hnmbscpayxzxrixrgxvvlifzlxdsdir-jq-1.5-lib/lib/libjq.1.dylib
Referenced from: /private/tmp/nix-build-jq-1.5.drv-0/jq-1.5/tests/../jq
Reason: image not found
./tests/jqtest: line 5: 75779 Abort trap: 6
</programlisting>
<programlisting>
stdenv.mkDerivation {
name = "libfoo-1.2.3";
# ...
doInstallCheck = true;
installCheckTarget = "check";
}
</programlisting>
</listitem>
<listitem>
<para>
Some packages assume xcode is available and use <command>xcrun</command>
to resolve build tools like <command>clang</command>, etc. This causes
errors like <code>xcode-select: error: no developer tools were found at
'/Applications/Xcode.app'</code> while the build doesn't actually depend
on xcode.
</para>
<programlisting>
stdenv.mkDerivation {
name = "libfoo-1.2.3";
# ...
prePatch = ''
substituteInPlace Makefile \
--replace '/usr/bin/xcrun clang' clang
'';
}
</programlisting>
<para>
The package <literal>xcbuild</literal> can be used to build projects that
really depend on Xcode, however projects that build some kind of graphical
interface won't work without using Xcode in an impure way.
</para>
</listitem>
</itemizedlist>
</section>
</chapter>

View File

@@ -1,219 +1,223 @@
<chapter xmlns="http://docbook.org/ns/docbook"
xmlns:xlink="http://www.w3.org/1999/xlink"
xml:id="chap-quick-start">
<title>Quick Start to Adding a Package</title>
<para>
To add a package to Nixpkgs:
<orderedlist>
<listitem>
<para>
Checkout the Nixpkgs source tree:
<title>Quick Start to Adding a Package</title>
<para>To add a package to Nixpkgs:
<orderedlist>
<listitem>
<para>Checkout the Nixpkgs source tree:
<screen>
$ git clone https://github.com/NixOS/nixpkgs
$ git clone git://github.com/NixOS/nixpkgs.git
$ cd nixpkgs</screen>
</para>
</listitem>
<listitem>
<para>
Find a good place in the Nixpkgs tree to add the Nix expression for your
package. For instance, a library package typically goes into
<filename>pkgs/development/libraries/<replaceable>pkgname</replaceable></filename>,
while a web browser goes into
<filename>pkgs/applications/networking/browsers/<replaceable>pkgname</replaceable></filename>.
See <xref linkend="sec-organisation" /> for some hints on the tree
organisation. Create a directory for your package, e.g.
</listitem>
<listitem>
<para>Find a good place in the Nixpkgs tree to add the Nix
expression for your package. For instance, a library package
typically goes into
<filename>pkgs/development/libraries/<replaceable>pkgname</replaceable></filename>,
while a web browser goes into
<filename>pkgs/applications/networking/browsers/<replaceable>pkgname</replaceable></filename>.
See <xref linkend="sec-organisation" /> for some hints on the tree
organisation. Create a directory for your package, e.g.
<screen>
$ mkdir pkgs/development/libraries/libfoo</screen>
</para>
</listitem>
<listitem>
<para>
In the package directory, create a Nix expression — a piece of code that
describes how to build the package. In this case, it should be a
<emphasis>function</emphasis> that is called with the package dependencies
as arguments, and returns a build of the package in the Nix store. The
expression should usually be called <filename>default.nix</filename>.
</listitem>
<listitem>
<para>In the package directory, create a Nix expression — a piece
of code that describes how to build the package. In this case, it
should be a <emphasis>function</emphasis> that is called with the
package dependencies as arguments, and returns a build of the
package in the Nix store. The expression should usually be called
<filename>default.nix</filename>.
<screen>
$ emacs pkgs/development/libraries/libfoo/default.nix
$ git add pkgs/development/libraries/libfoo/default.nix</screen>
</para>
<para>
You can have a look at the existing Nix expressions under
<filename>pkgs/</filename> to see how it's done. Here are some good
ones:
<itemizedlist>
<listitem>
<para>
GNU Hello:
<link
<para>You can have a look at the existing Nix expressions under
<filename>pkgs/</filename> to see how it's done. Here are some
good ones:
<itemizedlist>
<listitem>
<para>GNU Hello: <link
xlink:href="https://github.com/NixOS/nixpkgs/blob/master/pkgs/applications/misc/hello/default.nix"><filename>pkgs/applications/misc/hello/default.nix</filename></link>.
Trivial package, which specifies some <varname>meta</varname>
attributes which is good practice.
</para>
</listitem>
<listitem>
<para>
GNU cpio:
<link
Trivial package, which specifies some <varname>meta</varname>
attributes which is good practice.</para>
</listitem>
<listitem>
<para>GNU cpio: <link
xlink:href="https://github.com/NixOS/nixpkgs/blob/master/pkgs/tools/archivers/cpio/default.nix"><filename>pkgs/tools/archivers/cpio/default.nix</filename></link>.
Also a simple package. The generic builder in <varname>stdenv</varname>
does everything for you. It has no dependencies beyond
<varname>stdenv</varname>.
</para>
</listitem>
<listitem>
<para>
GNU Multiple Precision arithmetic library (GMP):
<link
Also a simple package. The generic builder in
<varname>stdenv</varname> does everything for you. It has
no dependencies beyond <varname>stdenv</varname>.</para>
</listitem>
<listitem>
<para>GNU Multiple Precision arithmetic library (GMP): <link
xlink:href="https://github.com/NixOS/nixpkgs/blob/master/pkgs/development/libraries/gmp/5.1.x.nix"><filename>pkgs/development/libraries/gmp/5.1.x.nix</filename></link>.
Also done by the generic builder, but has a dependency on
<varname>m4</varname>.
</para>
</listitem>
<listitem>
<para>
Pan, a GTK-based newsreader:
<link
Also done by the generic builder, but has a dependency on
<varname>m4</varname>.</para>
</listitem>
<listitem>
<para>Pan, a GTK-based newsreader: <link
xlink:href="https://github.com/NixOS/nixpkgs/blob/master/pkgs/applications/networking/newsreaders/pan/default.nix"><filename>pkgs/applications/networking/newsreaders/pan/default.nix</filename></link>.
Has an optional dependency on <varname>gtkspell</varname>, which is
only built if <varname>spellCheck</varname> is <literal>true</literal>.
</para>
</listitem>
<listitem>
<para>
Apache HTTPD:
<link
Has an optional dependency on <varname>gtkspell</varname>,
which is only built if <varname>spellCheck</varname> is
<literal>true</literal>.</para>
</listitem>
<listitem>
<para>Apache HTTPD: <link
xlink:href="https://github.com/NixOS/nixpkgs/blob/master/pkgs/servers/http/apache-httpd/2.4.nix"><filename>pkgs/servers/http/apache-httpd/2.4.nix</filename></link>.
A bunch of optional features, variable substitutions in the configure
flags, a post-install hook, and miscellaneous hackery.
</para>
</listitem>
<listitem>
<para>
Thunderbird:
<link
A bunch of optional features, variable substitutions in the
configure flags, a post-install hook, and miscellaneous
hackery.</para>
</listitem>
<listitem>
<para>Thunderbird: <link
xlink:href="https://github.com/NixOS/nixpkgs/blob/master/pkgs/applications/networking/mailreaders/thunderbird/default.nix"><filename>pkgs/applications/networking/mailreaders/thunderbird/default.nix</filename></link>.
Lots of dependencies.
</para>
</listitem>
<listitem>
<para>
JDiskReport, a Java utility:
<link
Lots of dependencies.</para>
</listitem>
<listitem>
<para>JDiskReport, a Java utility: <link
xlink:href="https://github.com/NixOS/nixpkgs/blob/master/pkgs/tools/misc/jdiskreport/default.nix"><filename>pkgs/tools/misc/jdiskreport/default.nix</filename></link>
(and the
<link
(and the <link
xlink:href="https://github.com/NixOS/nixpkgs/blob/master/pkgs/tools/misc/jdiskreport/builder.sh">builder</link>).
Nixpkgs doesn't have a decent <varname>stdenv</varname> for Java yet
so this is pretty ad-hoc.
</para>
</listitem>
<listitem>
<para>
XML::Simple, a Perl module:
<link
Nixpkgs doesn't have a decent <varname>stdenv</varname> for
Java yet so this is pretty ad-hoc.</para>
</listitem>
<listitem>
<para>XML::Simple, a Perl module: <link
xlink:href="https://github.com/NixOS/nixpkgs/blob/master/pkgs/top-level/perl-packages.nix"><filename>pkgs/top-level/perl-packages.nix</filename></link>
(search for the <varname>XMLSimple</varname> attribute). Most Perl
modules are so simple to build that they are defined directly in
<filename>perl-packages.nix</filename>; no need to make a separate file
for them.
</para>
</listitem>
<listitem>
<para>
Adobe Reader:
<link
(search for the <varname>XMLSimple</varname> attribute).
Most Perl modules are so simple to build that they are
defined directly in <filename>perl-packages.nix</filename>;
no need to make a separate file for them.</para>
</listitem>
<listitem>
<para>Adobe Reader: <link
xlink:href="https://github.com/NixOS/nixpkgs/blob/master/pkgs/applications/misc/adobe-reader/default.nix"><filename>pkgs/applications/misc/adobe-reader/default.nix</filename></link>.
Shows how binary-only packages can be supported. In particular the
<link
Shows how binary-only packages can be supported. In
particular the <link
xlink:href="https://github.com/NixOS/nixpkgs/blob/master/pkgs/applications/misc/adobe-reader/builder.sh">builder</link>
uses <command>patchelf</command> to set the RUNPATH and ELF interpreter
of the executables so that the right libraries are found at runtime.
</para>
</listitem>
</itemizedlist>
uses <command>patchelf</command> to set the RUNPATH and ELF
interpreter of the executables so that the right libraries
are found at runtime.</para>
</listitem>
</itemizedlist>
</para>
<para>
Some notes:
<itemizedlist>
<listitem>
<para>
All <varname linkend="chap-meta">meta</varname> attributes are
optional, but it's still a good idea to provide at least the
<varname>description</varname>, <varname>homepage</varname> and
<varname
linkend="sec-meta-license">license</varname>.
</para>
</listitem>
<listitem>
<para>
You can use <command>nix-prefetch-url</command>
<replaceable>url</replaceable> to get the SHA-256 hash of source
distributions. Similar commands, such as
<command>nix-prefetch-git</command> and
<command>nix-prefetch-hg</command>, are available in the
<literal>nix-prefetch-scripts</literal> package.
</para>
</listitem>
<listitem>
<para>
A list of schemes for <literal>mirror://</literal> URLs can be found in
<link
xlink:href="https://github.com/NixOS/nixpkgs/blob/master/pkgs/build-support/fetchurl/mirrors.nix"><filename>pkgs/build-support/fetchurl/mirrors.nix</filename></link>.
</para>
</listitem>
</itemizedlist>
<para>Some notes:
<itemizedlist>
<listitem>
<para>All <varname linkend="chap-meta">meta</varname>
attributes are optional, but it's still a good idea to
provide at least the <varname>description</varname>,
<varname>homepage</varname> and <varname
linkend="sec-meta-license">license</varname>.</para>
</listitem>
<listitem>
<para>You can use <command>nix-prefetch-url</command> (or similar nix-prefetch-git, etc)
<replaceable>url</replaceable> to get the SHA-256 hash of
source distributions. There are similar commands as <command>nix-prefetch-git</command> and
<command>nix-prefetch-hg</command> available in <literal>nix-prefetch-scripts</literal> package.</para>
</listitem>
<listitem>
<para>A list of schemes for <literal>mirror://</literal>
URLs can be found in <link
xlink:href="https://github.com/NixOS/nixpkgs/blob/master/pkgs/build-support/fetchurl/mirrors.nix"><filename>pkgs/build-support/fetchurl/mirrors.nix</filename></link>.</para>
</listitem>
</itemizedlist>
</para>
<para>
The exact syntax and semantics of the Nix expression language, including
the built-in function, are described in the Nix manual in the
<link
<para>The exact syntax and semantics of the Nix expression
language, including the built-in function, are described in the
Nix manual in the <link
xlink:href="http://hydra.nixos.org/job/nix/trunk/tarball/latest/download-by-type/doc/manual/#chap-writing-nix-expressions">chapter
on writing Nix expressions</link>.
</para>
</listitem>
<listitem>
<para>
Add a call to the function defined in the previous step to
<link
on writing Nix expressions</link>.</para>
</listitem>
<listitem>
<para>Add a call to the function defined in the previous step to
<link
xlink:href="https://github.com/NixOS/nixpkgs/blob/master/pkgs/top-level/all-packages.nix"><filename>pkgs/top-level/all-packages.nix</filename></link>
with some descriptive name for the variable, e.g.
<varname>libfoo</varname>.
<screen>
with some descriptive name for the variable,
e.g. <varname>libfoo</varname>.
<screen>
$ emacs pkgs/top-level/all-packages.nix</screen>
</para>
<para>
The attributes in that file are sorted by category (like “Development /
Libraries”) that more-or-less correspond to the directory structure of
Nixpkgs, and then by attribute name.
</para>
</listitem>
<listitem>
<para>
To test whether the package builds, run the following command from the
root of the nixpkgs source tree:
<screen>
<para>The attributes in that file are sorted by category (like
“Development / Libraries”) that more-or-less correspond to the
directory structure of Nixpkgs, and then by attribute name.</para>
</listitem>
<listitem>
<para>To test whether the package builds, run the following command
from the root of the nixpkgs source tree:
<screen>
$ nix-build -A libfoo</screen>
where <varname>libfoo</varname> should be the variable name defined in the
previous step. You may want to add the flag <option>-K</option> to keep
the temporary build directory in case something fails. If the build
succeeds, a symlink <filename>./result</filename> to the package in the
Nix store is created.
</para>
</listitem>
<listitem>
<para>
If you want to install the package into your profile (optional), do
<screen>
where <varname>libfoo</varname> should be the variable name
defined in the previous step. You may want to add the flag
<option>-K</option> to keep the temporary build directory in case
something fails. If the build succeeds, a symlink
<filename>./result</filename> to the package in the Nix store is
created.</para>
</listitem>
<listitem>
<para>If you want to install the package into your profile
(optional), do
<screen>
$ nix-env -f . -iA libfoo</screen>
</para>
</listitem>
<listitem>
<para>
Optionally commit the new package and open a pull request, or send a patch
to <literal>https://groups.google.com/forum/#!forum/nix-devel</literal>.
</para>
</listitem>
</orderedlist>
</para>
</listitem>
<listitem>
<para>Optionally commit the new package and open a pull request, or send a patch to
<literal>nix-dev@cs.uu.nl</literal>.</para>
</listitem>
</orderedlist>
</para>
</chapter>

File diff suppressed because it is too large Load Diff

View File

@@ -1,618 +0,0 @@
<chapter xmlns="http://docbook.org/ns/docbook"
xmlns:xlink="http://www.w3.org/1999/xlink"
xmlns:xi="http://www.w3.org/2001/XInclude"
version="5.0"
xml:id="sec-reviewing-contributions">
<title>Reviewing contributions</title>
<warning>
<para>
The following section is a draft, and the policy for reviewing is still
being discussed in issues such as
<link
xlink:href="https://github.com/NixOS/nixpkgs/issues/11166">#11166
</link> and
<link
xlink:href="https://github.com/NixOS/nixpkgs/issues/20836">#20836
</link>.
</para>
</warning>
<para>
The nixpkgs project receives a fairly high number of contributions via GitHub
pull-requests. Reviewing and approving these is an important task and a way
to contribute to the project.
</para>
<para>
The high change rate of nixpkgs makes any pull request that remains open for
too long subject to conflicts that will require extra work from the submitter
or the merger. Reviewing pull requests in a timely manner and being
responsive to the comments is the key to avoid these. GitHub provides sort
filters that can be used to see the
<link
xlink:href="https://github.com/NixOS/nixpkgs/pulls?q=is%3Apr+is%3Aopen+sort%3Aupdated-desc">most
recently</link> and the
<link
xlink:href="https://github.com/NixOS/nixpkgs/pulls?q=is%3Apr+is%3Aopen+sort%3Aupdated-asc">least
recently</link> updated pull-requests. We highly encourage looking at
<link xlink:href="https://github.com/NixOS/nixpkgs/pulls?q=is%3Apr+is%3Aopen+review%3Anone+status%3Asuccess+-label%3A%222.status%3A+work-in-progress%22+no%3Aproject+no%3Aassignee+no%3Amilestone">
this list of ready to merge, unreviewed pull requests</link>.
</para>
<para>
When reviewing a pull request, please always be nice and polite.
Controversial changes can lead to controversial opinions, but it is important
to respect every community member and their work.
</para>
<para>
GitHub provides reactions as a simple and quick way to provide feedback to
pull-requests or any comments. The thumb-down reaction should be used with
care and if possible accompanied with some explanation so the submitter has
directions to improve their contribution.
</para>
<para>
Pull-request reviews should include a list of what has been reviewed in a
comment, so other reviewers and mergers can know the state of the review.
</para>
<para>
All the review template samples provided in this section are generic and
meant as examples. Their usage is optional and the reviewer is free to adapt
them to their liking.
</para>
<section xml:id="reviewing-contributions-package-updates">
<title>Package updates</title>
<para>
A package update is the most trivial and common type of pull-request. These
pull-requests mainly consist of updating the version part of the package
name and the source hash.
</para>
<para>
It can happen that non-trivial updates include patches or more complex
changes.
</para>
<para>
Reviewing process:
</para>
<itemizedlist>
<listitem>
<para>
Add labels to the pull-request. (Requires commit rights)
</para>
<itemizedlist>
<listitem>
<para>
<literal>8.has: package (update)</literal> and any topic label that fit
the updated package.
</para>
</listitem>
</itemizedlist>
</listitem>
<listitem>
<para>
Ensure that the package versioning fits the guidelines.
</para>
</listitem>
<listitem>
<para>
Ensure that the commit text fits the guidelines.
</para>
</listitem>
<listitem>
<para>
Ensure that the package maintainers are notified.
</para>
<itemizedlist>
<listitem>
<para>
<link xlink:href="https://help.github.com/articles/about-codeowners/">CODEOWNERS</link>
will make GitHub notify users based on the submitted changes, but it can
happen that it misses some of the package maintainers.
</para>
</listitem>
</itemizedlist>
</listitem>
<listitem>
<para>
Ensure that the meta field information is correct.
</para>
<itemizedlist>
<listitem>
<para>
License can change with version updates, so it should be checked to
match the upstream license.
</para>
</listitem>
<listitem>
<para>
If the package has no maintainer, a maintainer must be set. This can be
the update submitter or a community member that accepts to take
maintainership of the package.
</para>
</listitem>
</itemizedlist>
</listitem>
<listitem>
<para>
Ensure that the code contains no typos.
</para>
</listitem>
<listitem>
<para>
Building the package locally.
</para>
<itemizedlist>
<listitem>
<para>
Pull-requests are often targeted to the master or staging branch, and
building the pull-request locally when it is submitted can trigger many
source builds.
</para>
<para>
It is possible to rebase the changes on nixos-unstable or
nixpkgs-unstable for easier review by running the following commands
from a nixpkgs clone.
<screen>
$ git remote add channels https://github.com/NixOS/nixpkgs-channels.git <co
xml:id='reviewing-rebase-1' />
$ git fetch channels nixos-unstable <co xml:id='reviewing-rebase-2' />
$ git fetch origin pull/PRNUMBER/head <co xml:id='reviewing-rebase-3' />
$ git rebase --onto nixos-unstable BASEBRANCH FETCH_HEAD <co
xml:id='reviewing-rebase-4' />
</screen>
<calloutlist>
<callout arearefs='reviewing-rebase-1'>
<para>
This should be done only once to be able to fetch channel branches
from the nixpkgs-channels repository.
</para>
</callout>
<callout arearefs='reviewing-rebase-2'>
<para>
Fetching the nixos-unstable branch.
</para>
</callout>
<callout arearefs='reviewing-rebase-3'>
<para>
Fetching the pull-request changes, <varname>PRNUMBER</varname> is the
number at the end of the pull-request title and
<varname>BASEBRANCH</varname> the base branch of the pull-request.
</para>
</callout>
<callout arearefs='reviewing-rebase-4'>
<para>
Rebasing the pull-request changes to the nixos-unstable branch.
</para>
</callout>
</calloutlist>
</para>
</listitem>
<listitem>
<para>
The <link xlink:href="https://github.com/madjar/nox">nox</link> tool can
be used to review a pull-request content in a single command. It doesn't
rebase on a channel branch so it might trigger multiple source builds.
<varname>PRNUMBER</varname> should be replaced by the number at the end
of the pull-request title.
</para>
<screen>
$ nix-shell -p nox --run "nox-review -k pr PRNUMBER"
</screen>
</listitem>
</itemizedlist>
</listitem>
<listitem>
<para>
Running every binary.
</para>
</listitem>
</itemizedlist>
<example xml:id="reviewing-contributions-sample-package-update">
<title>Sample template for a package update review</title>
<screen>
##### Reviewed points
- [ ] package name fits guidelines
- [ ] package version fits guidelines
- [ ] package build on ARCHITECTURE
- [ ] executables tested on ARCHITECTURE
- [ ] all depending packages build
##### Possible improvements
##### Comments
</screen>
</example>
</section>
<section xml:id="reviewing-contributions-new-packages">
<title>New packages</title>
<para>
New packages are a common type of pull-request. These pull requests
consist of adding a new Nix expression for a package.
</para>
<para>
Reviewing process:
</para>
<itemizedlist>
<listitem>
<para>
Add labels to the pull-request. (Requires commit rights)
</para>
<itemizedlist>
<listitem>
<para>
<literal>8.has: package (new)</literal> and any topic label that fit the
new package.
</para>
</listitem>
</itemizedlist>
</listitem>
<listitem>
<para>
Ensure that the package versioning is fitting the guidelines.
</para>
</listitem>
<listitem>
<para>
Ensure that the commit name is fitting the guidelines.
</para>
</listitem>
<listitem>
<para>
Ensure that the meta field contains correct information.
</para>
<itemizedlist>
<listitem>
<para>
License must be checked to be fitting upstream license.
</para>
</listitem>
<listitem>
<para>
Platforms should be set or the package will not get binary substitutes.
</para>
</listitem>
<listitem>
<para>
A maintainer must be set, this can be the package submitter or a
community member that accepts to take maintainership of the package.
</para>
</listitem>
</itemizedlist>
</listitem>
<listitem>
<para>
Ensure that the code contains no typos.
</para>
</listitem>
<listitem>
<para>
Ensure the package source.
</para>
<itemizedlist>
<listitem>
<para>
Mirrors urls should be used when available.
</para>
</listitem>
<listitem>
<para>
The most appropriate function should be used (e.g. packages from GitHub
should use <literal>fetchFromGitHub</literal>).
</para>
</listitem>
</itemizedlist>
</listitem>
<listitem>
<para>
Building the package locally.
</para>
</listitem>
<listitem>
<para>
Running every binary.
</para>
</listitem>
</itemizedlist>
<example xml:id="reviewing-contributions-sample-new-package">
<title>Sample template for a new package review</title>
<screen>
##### Reviewed points
- [ ] package path fits guidelines
- [ ] package name fits guidelines
- [ ] package version fits guidelines
- [ ] package build on ARCHITECTURE
- [ ] executables tested on ARCHITECTURE
- [ ] `meta.description` is set and fits guidelines
- [ ] `meta.license` fits upstream license
- [ ] `meta.platforms` is set
- [ ] `meta.maintainers` is set
- [ ] build time only dependencies are declared in `nativeBuildInputs`
- [ ] source is fetched using the appropriate function
- [ ] phases are respected
- [ ] patches that are remotely available are fetched with `fetchpatch`
##### Possible improvements
##### Comments
</screen>
</example>
</section>
<section xml:id="reviewing-contributions-module-updates">
<title>Module updates</title>
<para>
Module updates are submissions changing modules in some way. These often
contain changes to the options or introduce new options.
</para>
<para>
Reviewing process
</para>
<itemizedlist>
<listitem>
<para>
Add labels to the pull-request. (Requires commit rights)
</para>
<itemizedlist>
<listitem>
<para>
<literal>8.has: module (update)</literal> and any topic label that fit
the module.
</para>
</listitem>
</itemizedlist>
</listitem>
<listitem>
<para>
Ensure that the module maintainers are notified.
</para>
<itemizedlist>
<listitem>
<para>
<link xlink:href="https://help.github.com/articles/about-codeowners/">CODEOWNERS</link>
will make GitHub notify users based on the submitted changes, but it can
happen that it misses some of the package maintainers.
</para>
</listitem>
</itemizedlist>
</listitem>
<listitem>
<para>
Ensure that the module tests, if any, are succeeding.
</para>
</listitem>
<listitem>
<para>
Ensure that the introduced options are correct.
</para>
<itemizedlist>
<listitem>
<para>
Type should be appropriate (string-related types differ in their
merging capabilities; the <literal>optionSet</literal> and
<literal>string</literal> types are deprecated).
</para>
</listitem>
<listitem>
<para>
Description, default and example should be provided.
</para>
</listitem>
</itemizedlist>
</listitem>
<listitem>
<para>
Ensure that option changes are backward compatible.
</para>
<itemizedlist>
<listitem>
<para>
The <literal>mkRenamedOptionModule</literal> and
<literal>mkAliasOptionModule</literal> functions provide a way to make
option changes backward compatible.
</para>
</listitem>
</itemizedlist>
</listitem>
<listitem>
<para>
Ensure that removed options are declared with
<literal>mkRemovedOptionModule</literal>
</para>
</listitem>
<listitem>
<para>
Ensure that changes that are not backward compatible are mentioned in
release notes.
</para>
</listitem>
<listitem>
<para>
Ensure that documentation affected by the change is updated.
</para>
</listitem>
</itemizedlist>
<example xml:id="reviewing-contributions-sample-module-update">
<title>Sample template for a module update review</title>
<screen>
##### Reviewed points
- [ ] changes are backward compatible
- [ ] removed options are declared with `mkRemovedOptionModule`
- [ ] changes that are not backward compatible are documented in release notes
- [ ] module tests succeed on ARCHITECTURE
- [ ] options types are appropriate
- [ ] options description is set
- [ ] options example is provided
- [ ] documentation affected by the changes is updated
##### Possible improvements
##### Comments
</screen>
</example>
</section>
<section xml:id="reviewing-contributions-new-modules">
<title>New modules</title>
<para>
New modules submissions introduce a new module to NixOS.
</para>
<itemizedlist>
<listitem>
<para>
Add labels to the pull-request. (Requires commit rights)
</para>
<itemizedlist>
<listitem>
<para>
<literal>8.has: module (new)</literal> and any topic label that fit the
module.
</para>
</listitem>
</itemizedlist>
</listitem>
<listitem>
<para>
Ensure that the module tests, if any, are succeeding.
</para>
</listitem>
<listitem>
<para>
Ensure that the introduced options are correct.
</para>
<itemizedlist>
<listitem>
<para>
Type should be appropriate (string-related types differ in their
merging capabilities; the <literal>optionSet</literal> and
<literal>string</literal> types are deprecated).
</para>
</listitem>
<listitem>
<para>
Description, default and example should be provided.
</para>
</listitem>
</itemizedlist>
</listitem>
<listitem>
<para>
Ensure that the module <literal>meta</literal> field is present.
</para>
<itemizedlist>
<listitem>
<para>
Maintainers should be declared in <literal>meta.maintainers</literal>.
</para>
</listitem>
<listitem>
<para>
Module documentation should be declared with
<literal>meta.doc</literal>.
</para>
</listitem>
</itemizedlist>
</listitem>
<listitem>
<para>
Ensure that the module respects other modules' functionality.
</para>
<itemizedlist>
<listitem>
<para>
For example, enabling a module should not open firewall ports by
default.
</para>
</listitem>
</itemizedlist>
</listitem>
</itemizedlist>
<example xml:id="reviewing-contributions-sample-new-module">
<title>Sample template for a new module review</title>
<screen>
##### Reviewed points
- [ ] module path fits the guidelines
- [ ] module tests succeed on ARCHITECTURE
- [ ] options have appropriate types
- [ ] options have default
- [ ] options have example
- [ ] options have descriptions
- [ ] No unneeded package is added to environment.systemPackages
- [ ] meta.maintainers is set
- [ ] module documentation is declared in meta.doc
##### Possible improvements
##### Comments
</screen>
</example>
</section>
<section xml:id="reviewing-contributions-other-submissions">
<title>Other submissions</title>
<para>
Other types of submissions require different reviewing steps.
</para>
<para>
If you consider yourself to have enough knowledge and experience in a topic and would
like to be a long-term reviewer for related submissions, please contact the
current reviewers for that topic. They will give you information about the
reviewing process. The main reviewers for a topic can be hard to find as
there is no list, but checking past pull-requests to see who reviewed or
git-blaming the code to see who committed to that topic can give some hints.
</para>
<para>
Container system, boot system and library changes are some examples of the
pull requests fitting this category.
</para>
</section>
<section xml:id="reviewing-contributions--merging-pull-requests">
<title>Merging pull-requests</title>
<para>
It is possible for community members that have enough knowledge and
experience in a specific topic to contribute by merging pull requests.
</para>
<para>
TODO: add the procedure to request merging rights.
</para>
<!--
The following paragraph about how to deal with inactive contributors is just a
proposition and should be modified to what the community agrees to be the right
policy.
<para>Please note that contributors with commit rights who have been inactive for more
than three months will have their commit rights revoked.</para>
-->
<para>
In case a contributor leaves the Nix community for good, they should
create an issue or post on
<link
xlink:href="https://discourse.nixos.org">Discourse</link> with
references to the packages and modules they maintain so that
maintainership can be taken over by other contributors.
</para>
</section>
</chapter>

View File

@@ -1,5 +0,0 @@
{ pkgs ? import ../. {} }:
(import ./default.nix {}).overrideAttrs (x: {
buildInputs = x.buildInputs ++ [ pkgs.xmloscopy pkgs.ruby ];
})

File diff suppressed because it is too large Load Diff

View File

@@ -29,8 +29,8 @@ h2 /* chapters, appendices, subtitle */
}
/* Extra space between chapters, appendices. */
div.chapter > div.titlepage h2, div.appendix > div.titlepage h2
{
div.chapter > div.titlepage h2, div.appendix > div.titlepage h2
{
margin-top: 1.5em;
}
@@ -104,7 +104,7 @@ pre.screen, pre.programlisting
padding: 3px 3px;
margin-left: 1.5em;
margin-right: 1.5em;
color: #600000;
background: #f4f4f8;
font-family: monospace;
border-radius: 0.4em;
@@ -118,6 +118,7 @@ div.example pre.programlisting
margin: 0 0 0 0;
}
/***************************************************************************
Notes, warnings etc:
***************************************************************************/
@@ -171,7 +172,7 @@ div.navfooter *
/***************************************************************************
Links colors and highlighting:
Links colors and highlighting:
***************************************************************************/
a { text-decoration: none; }
@@ -208,7 +209,7 @@ tt, code
.term
{
font-weight: bold;
}
div.variablelist dd p, div.glosslist dd p
@@ -248,24 +249,7 @@ table
box-shadow: 0.4em 0.4em 0.5em #e0e0e0;
}
table.simplelist
{
text-align: left;
color: #005aa0;
border: 0;
padding: 5px;
background: #fffff5;
font-weight: normal;
font-style: italic;
box-shadow: none;
margin-bottom: 1em;
}
div.navheader table, div.navfooter table {
box-shadow: none;
}
div.affiliation
{
font-style: italic;
}
}

View File

@@ -1,513 +1,320 @@
<chapter xmlns="http://docbook.org/ns/docbook"
xmlns:xlink="http://www.w3.org/1999/xlink"
xml:id="chap-submitting-changes">
<title>Submitting changes</title>
<section xml:id="submitting-changes-making-patches">
<title>Making patches</title>
<itemizedlist>
<listitem>
<para>
Read <link xlink:href="https://nixos.org/nixpkgs/manual/">Manual (How to
write packages for Nix)</link>.
</para>
</listitem>
<listitem>
<para>
Fork the repository on GitHub.
</para>
</listitem>
<listitem>
<para>
Create a branch for your future fix.
<itemizedlist>
<listitem>
<para>
You can create a branch from a commit of your local
<command>nixos-version</command>. That will help you avoid
additional local compilation, because you will receive packages from
the binary cache.
<itemizedlist>
<listitem>
<para>
For example: <command>nixos-version</command> returns
<command>15.05.git.0998212 (Dingo)</command>. So you can do:
</para>
</listitem>
</itemizedlist>
<title>Submitting changes</title>
<section>
<title>Making patches</title>
<itemizedlist>
<listitem>
<para>Read <link xlink:href="https://nixos.org/nixpkgs/manual/">Manual (How to write packages for Nix)</link>.</para>
</listitem>
<listitem>
<para>Fork the repository on GitHub.</para>
</listitem>
<listitem>
<para>Create a branch for your future fix.
<itemizedlist>
<listitem>
<para>You can create a branch from a commit of your local <command>nixos-version</command>. That will help you avoid additional local compilation, because you will receive packages from the binary cache.
<itemizedlist>
<listitem>
<para>For example: <command>nixos-version</command> returns <command>15.05.git.0998212 (Dingo)</command>. So you can do:</para>
</listitem>
</itemizedlist>
<screen>
$ git checkout 0998212
$ git checkout -b 'fix/pkg-name-update'
</screen>
</para>
</listitem>
<listitem>
<para>
Please avoid working directly on the <command>master</command> branch.
</para>
</listitem>
</itemizedlist>
</para>
</listitem>
<listitem>
<para>
Make commits of logical units.
<itemizedlist>
<listitem>
<para>
If you removed pkgs, made some major NixOS changes etc., write about
them in
<command>nixos/doc/manual/release-notes/rl-unstable.xml</command>.
</para>
</listitem>
</itemizedlist>
</para>
</listitem>
<listitem>
<para>
Check for unnecessary whitespace with <command>git diff --check</command>
before committing.
</para>
</listitem>
<listitem>
<para>
Format the commit message in the following way:
</para>
</para>
</listitem>
<listitem>
<para>Please avoid working directly on the <command>master</command> branch.</para>
</listitem>
</itemizedlist>
</para>
</listitem>
<listitem>
<para>Make commits of logical units.
<itemizedlist>
<listitem>
<para>If you removed pkgs, made some major NixOS changes etc., write about them in <command>nixos/doc/manual/release-notes/rl-unstable.xml</command>.</para>
</listitem>
</itemizedlist>
</para>
</listitem>
<listitem>
<para>Check for unnecessary whitespace with <command>git diff --check</command> before committing.</para>
</listitem>
<listitem>
<para>Format the commit message in the following way:</para>
<programlisting>
(pkg-name | nixos/&lt;module>): (from -> to | init at version | refactor | etc)
(pkg-name | service-name): (from -> to | init at version | refactor | etc)
Additional information.
</programlisting>
<itemizedlist>
<listitem>
<para>
Examples:
<itemizedlist>
<listitem>
<para>
<command>nginx: init at 2.0.1</command>
</para>
</listitem>
<listitem>
<para>
<command>firefox: 54.0.1 -> 55.0</command>
</para>
</listitem>
<listitem>
<para>
<command>nixos/hydra: add bazBaz option</command>
</para>
</listitem>
<listitem>
<para>
<command>nixos/nginx: refactor config generation</command>
</para>
</listitem>
</itemizedlist>
</para>
</listitem>
</itemizedlist>
</listitem>
<listitem>
<para>
Test your changes. If you work with
<itemizedlist>
<listitem>
<para>
nixpkgs:
<itemizedlist>
<listitem>
<para>
update pkg ->
<itemizedlist>
<listitem>
<para>
<command>nix-env -i pkg-name -f &lt;path to your local nixpkgs
folder&gt;</command>
</para>
</listitem>
</itemizedlist>
</para>
</listitem>
<listitem>
<para>
add pkg ->
<itemizedlist>
<listitem>
<para>
Make sure it's in
<command>pkgs/top-level/all-packages.nix</command>
</para>
</listitem>
<listitem>
<para>
<command>nix-env -i pkg-name -f &lt;path to your local nixpkgs
folder&gt;</command>
</para>
</listitem>
</itemizedlist>
</para>
</listitem>
<listitem>
<para>
<emphasis>If you don't want to install the pkg in your
profile</emphasis>.
<itemizedlist>
<listitem>
<para>
<command>nix-build -A pkg-attribute-name &lt;path to your local
nixpkgs folder&gt;/default.nix</command> and check results in the
folder <command>result</command>. It will appear in the same
directory where you did <command>nix-build</command>.
</para>
</listitem>
</itemizedlist>
</para>
</listitem>
<listitem>
<para>
If you did <command>nix-env -i pkg-name</command> you can do
<command>nix-env -e pkg-name</command> to uninstall it from your
system.
</para>
</listitem>
</itemizedlist>
</para>
</listitem>
<listitem>
<para>
NixOS and its modules:
<itemizedlist>
<listitem>
<para>
You can add new module to your NixOS configuration file (usually
it's <command>/etc/nixos/configuration.nix</command>). And do
<command>sudo nixos-rebuild test -I nixpkgs=&lt;path to your local
nixpkgs folder&gt; --fast</command>.
</para>
</listitem>
</itemizedlist>
</para>
</listitem>
</itemizedlist>
</para>
</listitem>
<listitem>
<para>
If you have commits <command>pkg-name: oh, forgot to insert
whitespace</command>: squash commits in this case. Use <command>git rebase
-i</command>.
</para>
</listitem>
<listitem>
<para>
Rebase your branch against the current <command>master</command>.
</para>
</listitem>
</itemizedlist>
</section>
<section xml:id="submitting-changes-submitting-changes">
<title>Submitting changes</title>
<itemizedlist>
<listitem>
<para>Examples:
<itemizedlist>
<listitem>
<para>
<command>nginx: init at 2.0.1</command>
</para>
</listitem>
<listitem>
<para>
<command>firefox: 3.0 -> 3.1.1</command>
</para>
</listitem>
<listitem>
<para>
<command>hydra service: add bazBaz option</command>
</para>
</listitem>
<listitem>
<para>
<command>nginx service: refactor config generation</command>
</para>
</listitem>
</itemizedlist>
</para>
</listitem>
</itemizedlist>
</listitem>
<listitem>
<para>Test your changes. If you work with
<itemizedlist>
<listitem>
<para>nixpkgs:
<itemizedlist>
<listitem>
<para>update pkg ->
<itemizedlist>
<listitem>
<para>
<command>nix-env -i pkg-name -f &lt;path to your local nixpkgs folder&gt;</command>
</para>
</listitem>
</itemizedlist>
</para>
</listitem>
<listitem>
<para>add pkg ->
<itemizedlist>
<listitem>
<para>Make sure it's in <command>pkgs/top-level/all-packages.nix</command>
</para>
</listitem>
<listitem>
<para>
<command>nix-env -i pkg-name -f &lt;path to your local nixpkgs folder&gt;</command>
</para>
</listitem>
</itemizedlist>
</para>
</listitem>
<listitem>
<para>
<emphasis>If you don't want to install the pkg in your profile</emphasis>.
<itemizedlist>
<listitem>
<para>
<command>nix-build -A pkg-attribute-name &lt;path to your local nixpkgs folder&gt;/default.nix</command> and check results in the folder <command>result</command>. It will appear in the same directory where you did <command>nix-build</command>.</para>
</listitem>
</itemizedlist>
</para>
</listitem>
<listitem>
<para>If you did <command>nix-env -i pkg-name</command> you can do <command>nix-env -e pkg-name</command> to uninstall it from your system.</para>
</listitem>
</itemizedlist>
</para>
</listitem>
<listitem>
<para>NixOS and its modules:
<itemizedlist>
<listitem>
<para>You can add new module to your NixOS configuration file (usually it's <command>/etc/nixos/configuration.nix</command>).
And do <command>sudo nixos-rebuild test -I nixpkgs=&lt;path to your local nixpkgs folder&gt; --fast</command>.</para>
</listitem>
</itemizedlist>
</para>
</listitem>
</itemizedlist>
</para>
</listitem>
<listitem>
<para>If you have commits <command>pkg-name: oh, forgot to insert whitespace</command>: squash commits in this case. Use <command>git rebase -i</command>.</para>
</listitem>
<listitem>
<para>Rebase your branch against the current <command>master</command>.</para>
</listitem>
</itemizedlist>
</section>
<section>
<title>Submitting changes</title>
<itemizedlist>
<listitem>
<para>Push your changes to your fork of nixpkgs.</para>
</listitem>
<listitem>
<para>Create pull request:
<itemizedlist>
<listitem>
<para>Write the title in format <command>(pkg-name | service): improvement</command>.
<itemizedlist>
<listitem>
<para>If you update the pkg, write versions <command>from -> to</command>.</para>
</listitem>
</itemizedlist>
</para>
</listitem>
<listitem>
<para>Write in a comment whether you have tested your patch. Do not rely too much on <command>TravisCI</command>.</para>
</listitem>
<listitem>
<para>If you make an improvement, write about your motivation.</para>
</listitem>
<listitem>
<para>Notify maintainers of the package. For example add to the message: <command>cc @jagajaga @domenkozar</command>.</para>
</listitem>
</itemizedlist>
</para>
</listitem>
</itemizedlist>
</section>
<section>
<title>Hotfixing pull requests</title>
<itemizedlist>
<listitem>
<para>Make the appropriate changes in your branch.</para>
</listitem>
<listitem>
<para>Don't create additional commits, do
<itemizedlist>
<listitem>
<para><command>git rebase -i</command></para>
</listitem>
<listitem>
<para>
<command>git push --force</command> to your branch.</para>
</listitem>
</itemizedlist>
</para>
</listitem>
</itemizedlist>
</section>
<section>
<title>Commit policy</title>
<itemizedlist>
<listitem>
<para>Commits must be sufficiently tested before being merged, both for the master and staging branches.</para>
</listitem>
<listitem>
<para>Hydra builds for master and staging should not be used as a testing platform; Hydra is a build farm for changes that have already been tested.</para>
</listitem>
<listitem>
<para>When changing the bootloader installation process, extra care must be taken. Grub installations cannot be rolled back, hence changes may break people's installations forever. For any non-trivial change to the bootloader please file a PR asking for review, especially from @edolstra.</para>
</listitem>
</itemizedlist>
<section>
<title>Master branch</title>
<itemizedlist>
<listitem>
<para>
Push your changes to your fork of nixpkgs.
</para>
</listitem>
<listitem>
<para>
Create pull request:
<itemizedlist>
<listitem>
<para>
Write the title in the format <command>(pkg-name | nixos/&lt;module>):
improvement</command>.
<itemizedlist>
<listitem>
<para>
If you update the pkg, write versions <command>from -> to</command>.
</para>
</listitem>
</itemizedlist>
</para>
</listitem>
<listitem>
<para>
State in a comment whether you have tested your patch. Do not rely too much on
<command>TravisCI</command>.
</para>
</listitem>
<listitem>
<para>
If you make an improvement, write about your motivation.
</para>
</listitem>
<listitem>
<para>
Notify maintainers of the package. For example add to the message:
<command>cc @jagajaga @domenkozar</command>.
</para>
</listitem>
</itemizedlist>
</para>
</listitem>
<listitem>
<para>
The master branch should only see non-breaking commits that do not cause mass rebuilds.
</para>
</listitem>
</itemizedlist>
</section>
<section xml:id="submitting-changes-pull-request-template">
<title>Pull Request Template</title>
</section>
<para>
The pull request template helps determine what steps have been made for a
contribution so far, and will help guide maintainers on the status of a
change. The motivation section of the PR should include any extra details
the title does not address and link any existing issues related to the pull
request.
</para>
<para>
When a PR is created, it will be pre-populated with some checkboxes detailed
below:
</para>
<section xml:id="submitting-changes-tested-with-sandbox">
<title>Tested using sandboxing</title>
<para>
When sandbox builds are enabled, Nix sets up an isolated environment for
each build process. This removes hidden dependencies introduced by the build
environment and improves reproducibility. In particular, it blocks network
access during the build outside of <function>fetch*</function> functions and
access to files outside the Nix store. Depending on the operating system,
access to other resources is blocked as well (e.g. inter-process
communication is isolated on Linux); see
<link
xlink:href="https://nixos.org/nix/manual/#description-45">build-use-sandbox</link>
in the Nix manual for details.
</para>
<para>
Sandboxing is not enabled by default in Nix because of a small performance hit
on each build. In pull requests for
<link
xlink:href="https://github.com/NixOS/nixpkgs/">nixpkgs</link>
people are asked to test builds with sandboxing enabled (see
<literal>Tested using sandboxing</literal> in the pull request template),
because sandboxing is also used on
<link
xlink:href="https://nixos.org/hydra/">https://nixos.org/hydra/</link>.
</para>
<para>
Depending on whether you use NixOS or another platform, you can use one of
the following methods to enable sandboxing
<emphasis role="bold">before</emphasis> building the package:
<itemizedlist>
<listitem>
<para>
<emphasis role="bold">Globally enable sandboxing on NixOS</emphasis>:
add the following to <filename>configuration.nix</filename>
<screen>nix.useSandbox = true;</screen>
</para>
</listitem>
<listitem>
<para>
<emphasis role="bold">Globally enable sandboxing on non-NixOS
platforms</emphasis>: add the following to:
<filename>/etc/nix/nix.conf</filename>
<screen>build-use-sandbox = true</screen>
</para>
</listitem>
</itemizedlist>
</para>
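<para>
Alternatively, sandboxing can be enabled for a single invocation by passing
the option on the command line, a sketch assuming your Nix version accepts
<command>--option build-use-sandbox</command>:
<screen>nix-build -A pkg-attribute-name --option build-use-sandbox true</screen>
</para>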
</section>
<section xml:id="submitting-changes-platform-diversity">
<title>Built on platform(s)</title>
<para>
Many Nix packages are designed to run on multiple platforms. As such, it's
important to let the maintainer know which platforms your changes have been
tested on. It's not always practical to test a change on all platforms, and
doing so is not required for a pull request to be merged. Only check the
systems you actually tested the build on in this section.
</para>
</section>
<section xml:id="submitting-changes-nixos-tests">
<title>Tested via one or more NixOS test(s) if existing and applicable for the change (look inside nixos/tests)</title>
<para>
Packages with automated tests are much more likely to be merged in a timely
fashion because they require less manual testing by the maintainer to verify
the functionality of the package. If there are existing tests for the
package, run them to verify that your changes do not break them. Tests only
apply to packages with NixOS modules defined and can only
be run on Linux. For more details on writing and running tests, see the
<link
xlink:href="https://nixos.org/nixos/manual/index.html#sec-nixos-tests">section
in the NixOS manual</link>.
</para>
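<para>
Running an existing test could look roughly like this, a sketch assuming you
are in the root of your nixpkgs checkout and the test file exists:
<screen>nix-build nixos/tests/login.nix</screen>
</para>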
</section>
<section xml:id="submitting-changes-tested-compilation">
<title>Tested compilation of all pkgs that depend on this change using <command>nox-review</command></title>
<para>
If you are updating a package's version, you can use the nox utility to make
sure all packages that depend on the updated package still compile
correctly. The <command>nox-review</command> utility can look for and build
all packages that depend on the change, either based on uncommitted changes
with the <literal>wip</literal> option or based on a specified GitHub pull
request number.
</para>
<para>
To review uncommitted changes:
<screen>nix-shell -p nox --run "nox-review wip"</screen>
</para>
<para>
To review changes from pull request number 12345:
<screen>nix-shell -p nox --run "nox-review pr 12345"</screen>
</para>
</section>
<section xml:id="submitting-changes-tested-execution">
<title>Tested execution of all binary files (usually in <filename>./result/bin/</filename>)</title>
<para>
It's important to test any executables generated by a build when you change
or create a package in nixpkgs. This can be done by looking in
<filename>./result/bin</filename> and running any files in there or, at a
minimum, the main executable for the package. For example, if you make a
change to <package>texlive</package>, you probably would only check the
binaries associated with the change you made rather than testing all of
them.
</para>
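<para>
A minimal sketch (the attribute and binary names are placeholders):
<screen>
nix-build -A pkg-attribute-name
ls ./result/bin/
./result/bin/pkg-binary --help
</screen>
</para>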
</section>
<section xml:id="submitting-changes-contribution-standards">
<title>Meets Nixpkgs contribution standards</title>
<para>
The last checkbox asks whether your contribution fits
<link
xlink:href="https://github.com/NixOS/nixpkgs/blob/master/.github/CONTRIBUTING.md">CONTRIBUTING.md</link>.
The contributing document has detailed information on the standards the Nix
community has for commit messages, reviews, licensing of contributions, and
so on. Everyone should read and understand these standards before submitting
a pull request.
</para>
</section>
</section>
<section xml:id="submitting-changes-hotfixing-pull-requests">
<title>Hotfixing pull requests</title>
<section>
<title>Staging branch</title>
<itemizedlist>
<listitem>
<para>
Make the appropriate changes in your branch.
</para>
</listitem>
<listitem>
<para>
Don't create additional commits; instead, do
<itemizedlist>
<listitem>
<para>
<command>git rebase -i</command>
</para>
</listitem>
<listitem>
<para>
<command>git push --force</command> to your branch.
</para>
</listitem>
</itemizedlist>
</para>
</listitem>
<listitem>
<para>
It's only for non-breaking mass-rebuild commits. That means it's not to
be used for testing, and changes must have been well tested already.
<link xlink:href="http://comments.gmane.org/gmane.linux.distributions.nixos/13447">Read policy here</link>.
</para>
</listitem>
<listitem>
<para>
If the branch is already in a broken state, please refrain from adding
extra new breakages. Stabilize it for a few days, merge into master,
then resume development on staging.
<link xlink:href="http://hydra.nixos.org/jobset/nixpkgs/staging#tabs-evaluations">Keep an eye on the staging evaluations here</link>.
If any fixes for staging happen to be already in master, then master can
be merged into staging.
</para>
</listitem>
</itemizedlist>
</section>
<section xml:id="submitting-changes-commit-policy">
<title>Commit policy</title>
</section>
<section>
<title>Stable release branches</title>
<itemizedlist>
<listitem>
<para>
Commits must be sufficiently tested before being merged, both for the
master and staging branches.
</para>
</listitem>
<listitem>
<para>
Hydra builds for master and staging should not be used as a testing
platform; Hydra is a build farm for changes that have already been tested.
</para>
</listitem>
<listitem>
<para>
When changing the bootloader installation process, extra care must be
taken. Grub installations cannot be rolled back, hence changes may break
people's installations forever. For any non-trivial change to the
bootloader please file a PR asking for review, especially from @edolstra.
</para>
</listitem>
</itemizedlist>
<section xml:id="submitting-changes-master-branch">
<title>Master branch</title>
<itemizedlist>
<listitem>
<para>
It should only see non-breaking commits that do not cause mass rebuilds.
</para>
</listitem>
</itemizedlist>
</section>
<section xml:id="submitting-changes-staging-branch">
<title>Staging branch</title>
<itemizedlist>
<listitem>
<para>
It's only for non-breaking mass-rebuild commits. That means it's not to
be used for testing, and changes must have been well tested already.
<link xlink:href="https://web.archive.org/web/20160528180406/http://comments.gmane.org/gmane.linux.distributions.nixos/13447">Read
policy here</link>.
</para>
</listitem>
<listitem>
<para>
If the branch is already in a broken state, please refrain from adding
extra new breakages. Stabilize it for a few days, merge into master, then
resume development on staging.
<link xlink:href="http://hydra.nixos.org/jobset/nixpkgs/staging#tabs-evaluations">Keep
an eye on the staging evaluations here</link>. If any fixes for staging
happen to be already in master, then master can be merged into staging.
</para>
</listitem>
</itemizedlist>
</section>
<section xml:id="submitting-changes-stable-release-branches">
<title>Stable release branches</title>
<itemizedlist>
<listitem>
<para>
If you're cherry-picking a commit to a stable release branch, always use
<command>git cherry-pick -xe</command> and ensure the message contains a
clear description about why this needs to be included in the stable
branch.
</para>
<para>
An example of a cherry-picked commit would look like this:
</para>
<screen>
nixos: Refactor the world.

The original commit message describing the reason why the world was torn apart.

(cherry picked from commit abcdef)
Reason: I just had a gut feeling that this would also be wanted by people from
the stone age.
</screen>
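<para>
The corresponding command would look roughly like this (the release branch
name and commit hash are placeholders):
<screen>
git checkout release-16.03
git cherry-pick -xe &lt;commit-hash&gt;
</screen>
</para>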
</listitem>
</itemizedlist>
</section>
</section>
</itemizedlist>
</section>
</section>
</chapter>

View File

@@ -1,26 +1,20 @@
{ lib }:
# Operations on attribute sets.
let
with {
inherit (builtins) head tail length;
inherit (lib.trivial) and;
inherit (lib.strings) concatStringsSep;
inherit (lib.lists) fold concatMap concatLists;
in
inherit (import ./trivial.nix) or;
inherit (import ./default.nix) fold;
inherit (import ./strings.nix) concatStringsSep;
inherit (import ./lists.nix) concatMap concatLists all deepSeqList;
};
rec {
inherit (builtins) attrNames listToAttrs hasAttr isAttrs getAttr;
/* Return an attribute from nested attribute sets.
Example:
x = { a = { b = 3; }; }
attrByPath ["a" "b"] 6 x
=> 3
attrByPath ["z" "z"] 6 x
=> 6
*/
/* Return an attribute from nested attribute sets. For instance
["x" "y"] applied to some set e returns e.x.y, if it exists. The
default value is returned otherwise. */
attrByPath = attrPath: default: e:
let attr = head attrPath;
in
@@ -30,15 +24,8 @@ rec {
else default;
/* Return if an attribute from nested attribute set exists.
Example:
x = { a = { b = 3; }; }
hasAttrByPath ["a" "b"] x
=> true
hasAttrByPath ["z" "z"] x
=> false
*/
For instance ["x" "y"] applied to some set e returns true, if e.x.y exists. False
is returned otherwise. */
hasAttrByPath = attrPath: e:
let attr = head attrPath;
in
@@ -48,28 +35,14 @@ rec {
else false;
/* Return nested attribute set in which an attribute is set.
Example:
setAttrByPath ["a" "b"] 3
=> { a = { b = 3; }; }
*/
/* Return nested attribute set in which an attribute is set. For instance
["x" "y"] applied with some value v returns `x.y = v;' */
setAttrByPath = attrPath: value:
if attrPath == [] then value
else listToAttrs
[ { name = head attrPath; value = setAttrByPath (tail attrPath) value; } ];
/* Like `getAttrPath' without a default value. If it doesn't find the
path it will throw.
Example:
x = { a = { b = 3; }; }
getAttrFromPath ["a" "b"] x
=> 3
getAttrFromPath ["z" "z"] x
=> error: cannot find attribute `z.z'
*/
getAttrFromPath = attrPath: set:
let errorMsg = "cannot find attribute `" + concatStringsSep "." attrPath + "'";
in attrByPath attrPath (abort errorMsg) set;
@@ -116,7 +89,7 @@ rec {
listToAttrs (concatMap (name: let v = set.${name}; in if pred name v then [(nameValuePair name v)] else []) (attrNames set));
/* Filter an attribute set recursively by removing all attributes for
/* Filter an attribute set recursivelly by removing all attributes for
which the given predicate return false.
Example:
@@ -136,16 +109,14 @@ rec {
) (attrNames set)
);
/* Apply fold functions to values grouped by key.
Example:
foldAttrs (n: a: [n] ++ a) [] [{ a = 2; } { a = 3; }]
=> { a = [ 2 3 ]; }
/* foldAttrs: apply fold functions to values grouped by key. Eg accumulate values as list:
foldAttrs (n: a: [n] ++ a) [] [{ a = 2; } { a = 3; }]
=> { a = [ 2 3 ]; }
*/
foldAttrs = op: nul: list_of_attrs:
fold (n: a:
fold (name: o:
o // { ${name} = op n.${name} (a.${name} or nul); }
o // (listToAttrs [{inherit name; value = op n.${name} (a.${name} or nul); }])
) a (attrNames n)
) {} list_of_attrs;
@@ -176,12 +147,7 @@ rec {
/* Utility function that creates a {name, value} pair as expected by
builtins.listToAttrs.
Example:
nameValuePair "some" 6
=> { name = "some"; value = 6; }
*/
builtins.listToAttrs. */
nameValuePair = name: value: { inherit name value; };
@@ -195,9 +161,8 @@ rec {
{ x = "foo"; y = "bar"; }
=> { x = "x-foo"; y = "y-bar"; }
*/
mapAttrs = builtins.mapAttrs or
(f: set:
listToAttrs (map (attr: { name = attr; value = f attr set.${attr}; }) (attrNames set)));
mapAttrs = f: set:
listToAttrs (map (attr: { name = attr; value = f attr set.${attr}; }) (attrNames set));
/* Like `mapAttrs', but allows the name of each attribute to be
@@ -283,78 +248,48 @@ rec {
listToAttrs (map (n: nameValuePair n (f n)) names);
/* Check whether the argument is a derivation. Any set with
{ type = "derivation"; } counts as a derivation.
Example:
nixpkgs = import <nixpkgs> {}
isDerivation nixpkgs.ruby
=> true
isDerivation "foobar"
=> false
*/
/* Check whether the argument is a derivation. */
isDerivation = x: isAttrs x && x ? type && x.type == "derivation";
/* Converts a store path to a fake derivation. */
/* Convert a store path to a fake derivation. */
toDerivation = path:
let
path' = builtins.storePath path;
res =
{ type = "derivation";
name = builtins.unsafeDiscardStringContext (builtins.substring 33 (-1) (baseNameOf path'));
outPath = path';
outputs = [ "out" ];
out = res;
outputName = "out";
};
in res;
let path' = builtins.storePath path; in
{ type = "derivation";
name = builtins.unsafeDiscardStringContext (builtins.substring 33 (-1) (baseNameOf path'));
outPath = path';
outputs = [ "out" ];
};
/* If `cond' is true, return the attribute set `as',
otherwise an empty attribute set.
Example:
optionalAttrs (true) { my = "set"; }
=> { my = "set"; }
optionalAttrs (false) { my = "set"; }
=> { }
*/
/* If the Boolean `cond' is true, return the attribute set `as',
otherwise an empty attribute set. */
optionalAttrs = cond: as: if cond then as else {};
/* Merge sets of attributes and use the function f to merge attributes
values.
Example:
zipAttrsWithNames ["a"] (name: vs: vs) [{a = "x";} {a = "y"; b = "z";}]
=> { a = ["x" "y"]; }
*/
values. */
zipAttrsWithNames = names: f: sets:
listToAttrs (map (name: {
inherit name;
value = f name (catAttrs name sets);
}) names);
/* Implementation note: Common names appear multiple times in the list of
names, hopefully this does not affect the system because the maximal
laziness avoid computing twice the same expression and listToAttrs does
not care about duplicated attribute names.
Example:
zipAttrsWith (name: values: values) [{a = "x";} {a = "y"; b = "z";}]
=> { a = ["x" "y"]; b = ["z"] }
*/
# implentation note: Common names appear multiple times in the list of
# names, hopefully this does not affect the system because the maximal
# laziness avoid computing twice the same expression and listToAttrs does
# not care about duplicated attribute names.
zipAttrsWith = f: sets: zipAttrsWithNames (concatMap attrNames sets) f sets;
/* Like `zipAttrsWith' with `(name: values: value)' as the function.
Example:
zipAttrs [{a = "x";} {a = "y"; b = "z";}]
=> { a = ["x" "y"]; b = ["z"] }
*/
zipAttrs = zipAttrsWith (name: values: values);
/* backward compatibility */
zipWithNames = zipAttrsWithNames;
zip = builtins.trace "lib.zip is deprecated, use lib.zipAttrsWith instead" zipAttrsWith;
/* Does the same as the update operator '//' except that attributes are
merged until the given predicate is verified. The predicate should
merged until the given pedicate is verified. The predicate should
accept 3 arguments which are the path to reach the attribute, a part of
the first attribute set and a part of the second attribute set. When
the predicate is verified, the value of the first attribute set is
@@ -384,16 +319,15 @@ rec {
recursiveUpdateUntil = pred: lhs: rhs:
let f = attrPath:
zipAttrsWith (n: values:
let here = attrPath ++ [n]; in
if tail values == []
|| pred here (head (tail values)) (head values) then
|| pred attrPath (head (tail values)) (head values) then
head values
else
f here values
f (attrPath ++ [n]) values
);
in f [] [rhs lhs];
/* A recursive variant of the update operator //. The recursion
/* A recursive variant of the update operator //. The recusion
stops when one of the attribute values is not an attribute set,
in which case the right hand side value takes precedence over the
left hand side value.
@@ -417,54 +351,18 @@ rec {
!(isAttrs lhs && isAttrs rhs)
) lhs rhs;
/* Returns true if the pattern is contained in the set. False otherwise.
Example:
matchAttrs { cpu = {}; } { cpu = { bits = 64; }; }
=> true
*/
matchAttrs = pattern: attrs: assert isAttrs pattern;
fold and true (attrValues (zipAttrsWithNames (attrNames pattern) (n: values:
matchAttrs = pattern: attrs:
fold or false (attrValues (zipAttrsWithNames (attrNames pattern) (n: values:
let pat = head values; val = head (tail values); in
if length values == 1 then false
else if isAttrs pat then isAttrs val && matchAttrs pat val
else if isAttrs pat then isAttrs val && matchAttrs head values
else pat == val
) [pattern attrs]));
/* Override only the attributes that are already present in the old set
useful for deep-overriding.
Example:
x = { a = { b = 4; c = 3; }; }
overrideExisting x { a = { b = 6; d = 2; }; }
=> { a = { b = 6; d = 2; }; }
*/
# override only the attributes that are already present in the old set
# useful for deep-overriding
overrideExisting = old: new:
old // listToAttrs (map (attr: nameValuePair attr (attrByPath [attr] old.${attr} new)) (attrNames old));
/* Get a package output.
If no output is found, fallback to `.out` and then to the default.
Example:
getOutput "dev" pkgs.openssl
=> "/nix/store/9rz8gxhzf8sw4kf2j2f1grr49w8zx5vj-openssl-1.0.1r-dev"
*/
getOutput = output: pkg:
if pkg.outputUnspecified or false
then pkg.${output} or pkg.out or pkg
else pkg;
getBin = getOutput "bin";
getLib = getOutput "lib";
getDev = getOutput "dev";
/* Pick the outputs of packages to place in buildInputs */
chooseDevOutputs = drvs: builtins.map getDev drvs;
/*** deprecated stuff ***/
zipWithNames = zipAttrsWithNames;
zip = builtins.trace
"lib.zip is deprecated, use lib.zipAttrsWith instead" zipAttrsWith;
deepSeqAttrs = x: y: deepSeqList (attrValues x) y;
}

View File

@@ -1,5 +1,5 @@
{lib, pkgs}:
let inherit (lib) nvs; in
{lib, pkgs} :
let inherit (lib) nv nvs; in
{
# composableDerivation basically mixes these features:
@@ -19,7 +19,7 @@ let inherit (lib) nvs; in
# * vim_configurable
#
# A minimal example illustrating most features would look like this:
# let base = composableDerivation { (fixed: let inherit (fixed.fixed) name in {
# let base = composableDerivation { (fixed : let inherit (fixed.fixed) name in {
# src = fetchurl {
# }
# buildInputs = [A];
@@ -39,7 +39,7 @@ let inherit (lib) nvs; in
#
# issues:
# * its complicated to understand
# * some "features" such as exact merge behaviour are buried in mergeAttrBy
# * some "features" such as exact merge behaviour are burried in mergeAttrBy
# and defaultOverridableDelayableArgs assuming the default behaviour does
# the right thing in the common case
# * Eelco once said using such fix style functions are slow to evaluate
@@ -48,9 +48,9 @@ let inherit (lib) nvs; in
# / add patches the way you want without having to declare function arguments
#
# nice features:
# declaring "optional features" is modular. For instance:
# declaring "optional featuers" is modular. For instance:
# flags.curl = {
# configureFlags = ["--with-curl=${curl.dev}" "--with-curlwrappers"];
# configureFlags = ["--with-curl=${curl}" "--with-curlwrappers"];
# buildInputs = [curl openssl];
# };
# flags.other = { .. }
@@ -79,7 +79,7 @@ let inherit (lib) nvs; in
# consider adding addtional elements by derivation.merge { removeAttrs = ["elem"]; };
removeAttrs ? ["cfg" "flags"]
}: (lib.defaultOverridableDelayableArgs ( a: mkDerivation a)
}: (lib.defaultOverridableDelayableArgs ( a: mkDerivation a)
{
inherit applyPreTidy removeAttrs;
}).merge;

View File

@@ -1,19 +1,24 @@
{ lib }:
let
lib = import ./default.nix;
inherit (builtins) attrNames isFunction;
in
rec {
/* `overrideDerivation drv f' takes a derivation (i.e., the result
of a call to the builtin function `derivation') and returns a new
derivation in which the attributes of the original are overridden
derivation in which the attributes of the original are overriden
according to the function `f'. The function `f' is called with
the original derivation attributes.
`overrideDerivation' allows certain "ad-hoc" customisation
scenarios (e.g. in ~/.config/nixpkgs/config.nix). For instance,
if you want to "patch" the derivation returned by a package
function in Nixpkgs to build another version than what the
function itself provides, you can do something like this:
scenarios (e.g. in ~/.nixpkgs/config.nix). For instance, if you
want to "patch" the derivation returned by a package function in
Nixpkgs to build another version than what the function itself
provides, you can do something like this:
mySed = overrideDerivation pkgs.gnused (oldAttrs: {
name = "sed-4.2.2-pre";
@@ -31,7 +36,7 @@ rec {
overrideDerivation = drv: f:
let
newDrv = derivation (drv.drvAttrs // (f drv));
in lib.flip (extendDerivation true) newDrv (
in addPassthru newDrv (
{ meta = drv.meta or {};
passthru = if drv ? passthru then drv.passthru else {};
}
@@ -46,41 +51,21 @@ rec {
else { }));
/* `makeOverridable` takes a function from attribute set to attribute set and
injects `override` attibute which can be used to override arguments of
the function.
nix-repl> x = {a, b}: { result = a + b; }
nix-repl> y = lib.makeOverridable x { a = 1; b = 2; }
nix-repl> y
{ override = «lambda»; overrideDerivation = «lambda»; result = 3; }
nix-repl> y.override { a = 10; }
{ override = «lambda»; overrideDerivation = «lambda»; result = 12; }
Please refer to "Nixpkgs Contributors Guide" section
"<pkg>.overrideDerivation" to learn about `overrideDerivation` and caveats
related to its use.
*/
makeOverridable = f: origArgs:
let
ff = f origArgs;
overrideWith = newArgs: origArgs // (if lib.isFunction newArgs then newArgs origArgs else newArgs);
overrideWith = newArgs: origArgs // (if builtins.isFunction newArgs then newArgs origArgs else newArgs);
in
if builtins.isAttrs ff then (ff // {
override = newArgs: makeOverridable f (overrideWith newArgs);
overrideDerivation = fdrv:
makeOverridable (args: overrideDerivation (f args) fdrv) origArgs;
${if ff ? overrideAttrs then "overrideAttrs" else null} = fdrv:
makeOverridable (args: (f args).overrideAttrs fdrv) origArgs;
})
else if lib.isFunction ff then {
override = newArgs: makeOverridable f (overrideWith newArgs);
__functor = self: ff;
overrideDerivation = throw "overrideDerivation not yet supported for functors";
}
if builtins.isAttrs ff then (ff //
{ override = newArgs: makeOverridable f (overrideWith newArgs);
overrideDerivation = fdrv:
makeOverridable (args: overrideDerivation (f args) fdrv) origArgs;
})
else if builtins.isFunction ff then
{ override = newArgs: makeOverridable f (overrideWith newArgs);
__functor = self: ff;
overrideDerivation = throw "overrideDerivation not yet supported for functors";
}
else ff;
@@ -107,8 +92,8 @@ rec {
*/
callPackageWith = autoArgs: fn: args:
let
f = if lib.isFunction fn then fn else import fn;
auto = builtins.intersectAttrs (lib.functionArgs f) autoArgs;
f = if builtins.isFunction fn then fn else import fn;
auto = builtins.intersectAttrs (builtins.functionArgs f) autoArgs;
in makeOverridable f (auto // args);
@@ -117,17 +102,19 @@ rec {
individual attributes. */
callPackagesWith = autoArgs: fn: args:
let
f = if lib.isFunction fn then fn else import fn;
auto = builtins.intersectAttrs (lib.functionArgs f) autoArgs;
origArgs = auto // args;
pkgs = f origArgs;
mkAttrOverridable = name: pkg: makeOverridable (newArgs: (f newArgs).${name}) origArgs;
f = if builtins.isFunction fn then fn else import fn;
auto = builtins.intersectAttrs (builtins.functionArgs f) autoArgs;
finalArgs = auto // args;
pkgs = f finalArgs;
mkAttrOverridable = name: pkg: pkg // {
override = newArgs: mkAttrOverridable name (f (finalArgs // newArgs)).${name};
};
in lib.mapAttrs mkAttrOverridable pkgs;
/* Add attributes to each output of a derivation without changing
the derivation itself and check a given condition when evaluating. */
extendDerivation = condition: passthru: drv:
the derivation itself. */
addPassthru = drv: passthru:
let
outputs = drv.outputs or [ "out" ];
@@ -137,18 +124,13 @@ rec {
outputToAttrListElement = outputName:
{ name = outputName;
value = commonAttrs // {
inherit (drv.${outputName}) type outputName;
drvPath = assert condition; drv.${outputName}.drvPath;
outPath = assert condition; drv.${outputName}.outPath;
inherit (drv.${outputName}) outPath drvPath type outputName;
};
};
outputsList = map outputToAttrListElement outputs;
in commonAttrs // {
outputUnspecified = true;
drvPath = assert condition; drv.drvPath;
outPath = assert condition; drv.outPath;
};
in commonAttrs.${drv.outputName};
/* Strip a derivation of all non-essential attributes, returning
only those needed by hydra-eval-jobs. Also strictly evaluate the
@@ -185,7 +167,7 @@ rec {
/* Make a set of packages with a common scope. All packages called
with the provided `callPackage' will be evaluated with the same
arguments. Any package in the set may depend on any other. The
`overrideScope'` function allows subsequent modification of the package
`override' function allows subsequent modification of the package
set in a consistent way, i.e. all packages in the set will be
called with the overridden packages. The package sets may be
hierarchical: the packages in the set are called with the scope
@@ -195,11 +177,9 @@ rec {
let self = f self // {
newScope = scope: newScope (self // scope);
callPackage = self.newScope {};
overrideScope = g: lib.warn
"`overrideScope` (from `lib.makeScope`) is deprecated. Do `overrideScope' (self: super: { })` instead of `overrideScope (super: self: { })`. All other overrides have the parameters in that order, including other definitions of `overrideScope`. This was the only definition violating the pattern."
(makeScope newScope (lib.fixedPoints.extends (lib.flip g) f));
overrideScope' = g: makeScope newScope (lib.fixedPoints.extends g f);
packages = f;
override = g: makeScope newScope (self_:
let super = f self_;
in super // g super self_);
};
in self;

View File

@@ -1,155 +1,62 @@
/* Collection of functions useful for debugging
broken nix expressions.
let lib = import ./default.nix;
inherit (builtins) trace attrNamesToStr isAttrs isFunction isList isInt
isString isBool head substring attrNames;
inherit (lib) all id mapAttrsFlatten elem;
* `trace`-like functions take two values, print
the first to stderr and return the second.
* `traceVal`-like functions take one argument
which both printed and returned.
* `traceSeq`-like functions fully evaluate their
traced value before printing (not just to weak
head normal form like trace does by default).
* Functions that end in `-Fn` take an additional
function as their first argument, which is applied
to the traced value before it is printed.
*/
{ lib }:
let
inherit (builtins) trace isAttrs isList isInt
head substring attrNames;
inherit (lib) id elem isFunction;
in
rec {
# -- TRACING --
inherit (builtins) addErrorContext;
/* Conditionally trace the supplied message, based on a predicate.
addErrorContextToAttrs = lib.mapAttrs (a: v: lib.addErrorContext "while evaluating ${a}" v);
Type: traceIf :: bool -> string -> a -> a
traceIf = p: msg: x: if p then trace msg x else x;
Example:
traceIf true "hello" 3
trace: hello
=> 3
traceVal = x: trace x x;
traceXMLVal = x: trace (builtins.toXML x) x;
traceXMLValMarked = str: x: trace (str + builtins.toXML x) x;
# this can help debug your code as well - designed to not produce thousands of lines
traceShowVal = x : trace (showVal x) x;
traceShowValMarked = str: x: trace (str + showVal x) x;
attrNamesToStr = a : lib.concatStringsSep "; " (map (x : "${x}=") (attrNames a));
showVal = x :
if isAttrs x then
if x ? outPath then "x is a derivation, name ${if x ? name then x.name else "<no name>"}, { ${attrNamesToStr x} }"
else "x is attr set { ${attrNamesToStr x} }"
else if isFunction x then "x is a function"
else if x == [] then "x is an empty list"
else if isList x then "x is a list, first element is: ${showVal (head x)}"
else if x == true then "x is boolean true"
else if x == false then "x is boolean false"
else if x == null then "x is null"
else if isInt x then "x is an integer `${toString x}'"
else if isString x then "x is a string `${substring 0 50 x}...'"
else "x is probably a path `${substring 0 50 (toString x)}...'";
# trace the arguments passed to function and its result
# maybe rewrite these functions in a traceCallXml like style. Then one function is enough
traceCall = n : f : a : let t = n2 : x : traceShowValMarked "${n} ${n2}:" x; in t "result" (f (t "arg 1" a));
traceCall2 = n : f : a : b : let t = n2 : x : traceShowValMarked "${n} ${n2}:" x; in t "result" (f (t "arg 1" a) (t "arg 2" b));
traceCall3 = n : f : a : b : c : let t = n2 : x : traceShowValMarked "${n} ${n2}:" x; in t "result" (f (t "arg 1" a) (t "arg 2" b) (t "arg 3" c));
# FIXME: rename this?
traceValIfNot = c: x:
if c x then true else trace (showVal x) false;
/* Evaluate a set of tests. A test is an attribute set {expr,
expected}, denoting an expression and its expected result. The
result is a list of failed tests, each represented as {name,
expected, actual}, denoting the attribute name of the failing
test and its expected and actual results. Used for regression
testing of the functions in lib; see tests.nix for an example.
Only tests having names starting with "test" are run.
Add attr { tests = ["testName"]; } to run these test only
*/
traceIf =
# Predicate to check
pred:
# Message that should be traced
msg:
# Value to return
x: if pred then trace msg x else x;
/* Trace the supplied value after applying a function to it, and
return the original value.
Type: traceValFn :: (a -> b) -> a -> a
Example:
traceValFn (v: "mystring ${v}") "foo"
trace: mystring foo
=> "foo"
*/
traceValFn =
# Function to apply
f:
# Value to trace and return
x: trace (f x) x;
/* Trace the supplied value and return it.
Type: traceVal :: a -> a
Example:
traceVal 42
# trace: 42
=> 42
*/
traceVal = traceValFn id;
/* `builtins.trace`, but the value is `builtins.deepSeq`ed first.
Type: traceSeq :: a -> b -> b
Example:
trace { a.b.c = 3; } null
trace: { a = <CODE>; }
=> null
traceSeq { a.b.c = 3; } null
trace: { a = { b = { c = 3; }; }; }
=> null
*/
traceSeq =
# The value to trace
x:
# The value to return
y: trace (builtins.deepSeq x x) y;
/* Like `traceSeq`, but only evaluate down to depth n.
This is very useful because lots of `traceSeq` usages
lead to an infinite recursion.
Example:
traceSeqN 2 { a.b.c = 3; } null
trace: { a = { b = {}; }; }
=> null
*/
traceSeqN = depth: x: y: with lib;
let snip = v: if isList v then noQuotes "[]" v
else if isAttrs v then noQuotes "{}" v
else v;
noQuotes = str: v: { __pretty = const str; val = v; };
modify = n: fn: v: if (n == 0) then fn v
else if isList v then map (modify (n - 1) fn) v
else if isAttrs v then mapAttrs
(const (modify (n - 1) fn)) v
else v;
in trace (generators.toPretty { allowPrettyValues = true; }
(modify depth snip x)) y;
/* A combination of `traceVal` and `traceSeq` that applies a
provided function to the value to be traced after `deepSeq`ing
it.
*/
traceValSeqFn =
# Function to apply
f:
# Value to trace
v: traceValFn f (builtins.deepSeq v v);
/* A combination of `traceVal` and `traceSeq`. */
traceValSeq = traceValSeqFn id;
/* A combination of `traceVal` and `traceSeqN` that applies a
provided function to the value to be traced. */
traceValSeqNFn =
# Function to apply
f:
depth:
# Value to trace
v: traceSeqN depth (f v) v;
/* A combination of `traceVal` and `traceSeqN`. */
traceValSeqN = traceValSeqNFn id;
# -- TESTING --
/* Evaluate a set of tests. A test is an attribute set `{expr,
expected}`, denoting an expression and its expected result. The
result is a list of failed tests, each represented as `{name,
expected, actual}`, denoting the attribute name of the failing
test and its expected and actual results.
Used for regression testing of the functions in lib; see
tests.nix for an example. Only tests having names starting with
"test" are run.
Add attr { tests = ["testName"]; } to run these tests only.
*/
runTests =
# Tests to run
tests: lib.concatLists (lib.attrValues (lib.mapAttrs (name: test:
runTests = tests: lib.concatLists (lib.attrValues (lib.mapAttrs (name: test:
let testsToRun = if tests ? tests then tests.tests else [];
in if (substring 0 4 name == "test" || elem name testsToRun)
&& ((testsToRun == []) || elem name tests.tests)
@@ -158,75 +65,31 @@ rec {
then [ { inherit name; expected = test.expected; result = test.expr; } ]
else [] ) tests));
/* Create a test assuming that list elements are `true`.
# create a test assuming that list elements are true
# usage: { testX = allTrue [ true ]; }
testAllTrue = expr : { inherit expr; expected = map (x: true) expr; };
Example:
{ testX = allTrue [ true ]; }
*/
testAllTrue = expr: { inherit expr; expected = map (x: true) expr; };
# -- DEPRECATED --
traceShowVal = x: trace (showVal x) x;
traceShowValMarked = str: x: trace (str + showVal x) x;
attrNamesToStr = a:
trace ( "Warning: `attrNamesToStr` is deprecated "
+ "and will be removed in the next release. "
+ "Please use more specific concatenation "
+ "for your uses (`lib.concat(Map)StringsSep`)." )
(lib.concatStringsSep "; " (map (x: "${x}=") (attrNames a)));
showVal = with lib;
trace ( "Warning: `showVal` is deprecated "
+ "and will be removed in the next release, "
+ "please use `traceSeqN`" )
(let
modify = v:
let pr = f: { __pretty = f; val = v; };
in if isDerivation v then pr
(drv: "<δ:${drv.name}:${concatStringsSep ","
(attrNames drv)}>")
else if [] == v then pr (const "[]")
else if isList v then pr (l: "[ ${go (head l)}, ]")
else if isAttrs v then pr
(a: "{ ${ concatStringsSep ", " (attrNames a)} }")
else v;
go = x: generators.toPretty
{ allowPrettyValues = true; }
(modify x);
in go);
traceXMLVal = x:
trace ( "Warning: `traceXMLVal` is deprecated "
+ "and will be removed in the next release. "
+ "Please use `traceValFn builtins.toXML`." )
(trace (builtins.toXML x) x);
traceXMLValMarked = str: x:
trace ( "Warning: `traceXMLValMarked` is deprecated "
+ "and will be removed in the next release. "
+ "Please use `traceValFn (x: str + builtins.toXML x)`." )
(trace (str + builtins.toXML x) x);
# trace the arguments passed to function and its result
# maybe rewrite these functions in a traceCallXml like style. Then one function is enough
traceCall = n: f: a: let t = n2: x: traceShowValMarked "${n} ${n2}:" x; in t "result" (f (t "arg 1" a));
traceCall2 = n: f: a: b: let t = n2: x: traceShowValMarked "${n} ${n2}:" x; in t "result" (f (t "arg 1" a) (t "arg 2" b));
traceCall3 = n: f: a: b: c: let t = n2: x: traceShowValMarked "${n} ${n2}:" x; in t "result" (f (t "arg 1" a) (t "arg 2" b) (t "arg 3" c));
traceValIfNot = c: x:
trace ( "Warning: `traceValIfNot` is deprecated "
+ "and will be removed in the next release. "
+ "Please use `if/then/else` and `traceValSeq 1`.")
(if c x then true else traceSeq (showVal x) false);
addErrorContextToAttrs = attrs:
trace ( "Warning: `addErrorContextToAttrs` is deprecated "
+ "and will be removed in the next release. "
+ "Please use `builtins.addErrorContext` directly." )
(lib.mapAttrs (a: v: lib.addErrorContext "while evaluating ${a}" v) attrs);
# evaluate everything once so that errors will occur earlier
# hacky: traverse attrs by adding a dummy
# ignores functions (should this behavior change?) See strictf
#
# Note: This should be a primop! Something like seq of haskell would be nice to
# have as well. It's used fore debugging only anyway
strict = x :
let
traverse = x :
if isString x then true
else if isAttrs x then
if x ? outPath then true
else all id (mapAttrsFlatten (n: traverse) x)
else if isList x then
all id (map traverse x)
else if isBool x then true
else if isFunction x then true
else if isInt x then true
else if x == null then true
else true; # a (store) path?
in if traverse x then x else throw "else never reached";
# example: (traceCallXml "myfun" id 3) will output something like
# calling myfun arg 1: 3 result: 3
@@ -234,20 +97,17 @@ rec {
# note: if result doesn't evaluate you'll get no trace at all (FIXME)
# args should be printed in any case
traceCallXml = a:
trace ( "Warning: `traceCallXml` is deprecated "
+ "and will be removed in the next release. "
+ "Please complain if you use the function regularly." )
(if !isInt a then
if !isInt a then
traceCallXml 1 "calling ${a}\n"
else
let nr = a;
in (str: expr:
if isFunction expr then
(arg:
traceCallXml (builtins.add 1 nr) "${str}\n arg ${builtins.toString nr} is \n ${builtins.toXML (builtins.seq arg arg)}" (expr arg)
traceCallXml (builtins.add 1 nr) "${str}\n arg ${builtins.toString nr} is \n ${builtins.toXML (strict arg)}" (expr arg)
)
else
let r = builtins.seq expr expr;
let r = strict expr;
in trace "${str}\n result:\n${builtins.toXML r}" r
));
);
}

View File

@@ -1,136 +1,31 @@
/* Library of low-level helper functions for nix expressions.
*
* Please implement (mostly) exhaustive unit tests
* for new functions in `./tests.nix'.
*/
let
let
inherit (import ./fixed-points.nix {}) makeExtensible;
trivial = import ./trivial.nix;
lists = import ./lists.nix;
strings = import ./strings.nix;
stringsWithDeps = import ./strings-with-deps.nix;
attrsets = import ./attrsets.nix;
sources = import ./sources.nix;
modules = import ./modules.nix;
options = import ./options.nix;
types = import ./types.nix;
meta = import ./meta.nix;
debug = import ./debug.nix;
misc = import ./deprecated.nix;
maintainers = import ./maintainers.nix;
platforms = import ./platforms.nix;
systems = import ./systems.nix;
customisation = import ./customisation.nix;
licenses = import ./licenses.nix;
sandbox = import ./sandbox.nix;
lib = makeExtensible (self: let
callLibs = file: import file { lib = self; };
in with self; {
# often used, or depending on very little
trivial = callLibs ./trivial.nix;
fixedPoints = callLibs ./fixed-points.nix;
# datatypes
attrsets = callLibs ./attrsets.nix;
lists = callLibs ./lists.nix;
strings = callLibs ./strings.nix;
stringsWithDeps = callLibs ./strings-with-deps.nix;
# packaging
customisation = callLibs ./customisation.nix;
maintainers = import ../maintainers/maintainer-list.nix;
meta = callLibs ./meta.nix;
sources = callLibs ./sources.nix;
versions = callLibs ./versions.nix;
# module system
modules = callLibs ./modules.nix;
options = callLibs ./options.nix;
types = callLibs ./types.nix;
# constants
licenses = callLibs ./licenses.nix;
systems = callLibs ./systems;
# misc
debug = callLibs ./debug.nix;
generators = callLibs ./generators.nix;
misc = callLibs ./deprecated.nix;
# domain-specific
fetchers = callLibs ./fetchers.nix;
# Eval-time filesystem handling
filesystem = callLibs ./filesystem.nix;
# back-compat aliases
platforms = systems.forMeta;
inherit (builtins) add addErrorContext attrNames concatLists
deepSeq elem elemAt filter genericClosure genList getAttr
hasAttr head isAttrs isBool isInt isList isString length
lessThan listToAttrs pathExists readFile replaceStrings seq
stringLength sub substring tail;
inherit (trivial) id const concat or and bitAnd bitOr bitXor bitNot
boolToString mergeAttrs flip mapNullable inNixShell min max
importJSON warn info nixpkgsVersion version mod compare
splitByAndCompare functionArgs setFunctionArgs isFunction;
inherit (fixedPoints) fix fix' extends composeExtensions
makeExtensible makeExtensibleWithCustomName;
inherit (attrsets) attrByPath hasAttrByPath setAttrByPath
getAttrFromPath attrVals attrValues catAttrs filterAttrs
filterAttrsRecursive foldAttrs collect nameValuePair mapAttrs
mapAttrs' mapAttrsToList mapAttrsRecursive mapAttrsRecursiveCond
genAttrs isDerivation toDerivation optionalAttrs
zipAttrsWithNames zipAttrsWith zipAttrs recursiveUpdateUntil
recursiveUpdate matchAttrs overrideExisting getOutput getBin
getLib getDev chooseDevOutputs zipWithNames zip;
inherit (lists) singleton foldr fold foldl foldl' imap0 imap1
concatMap flatten remove findSingle findFirst any all count
optional optionals toList range partition zipListsWith zipLists
reverseList listDfs toposort sort naturalSort compareLists take
drop sublist last init crossLists unique intersectLists
subtractLists mutuallyExclusive groupBy groupBy';
inherit (strings) concatStrings concatMapStrings concatImapStrings
intersperse concatStringsSep concatMapStringsSep
concatImapStringsSep makeSearchPath makeSearchPathOutput
makeLibraryPath makeBinPath makePerlPath makeFullPerlPath optionalString
hasPrefix hasSuffix stringToCharacters stringAsChars escape
escapeShellArg escapeShellArgs replaceChars lowerChars
upperChars toLower toUpper addContextFrom splitString
removePrefix removeSuffix versionOlder versionAtLeast getVersion
nameFromURL enableFeature enableFeatureAs withFeature
withFeatureAs fixedWidthString fixedWidthNumber isStorePath
toInt readPathsFromFile fileContents;
inherit (stringsWithDeps) textClosureList textClosureMap
noDepEntry fullDepEntry packEntry stringAfter;
inherit (customisation) overrideDerivation makeOverridable
callPackageWith callPackagesWith extendDerivation hydraJob
makeScope;
inherit (meta) addMetaAttrs dontDistribute setName updateName
appendToName mapDerivationAttrset lowPrio lowPrioSet hiPrio
hiPrioSet;
inherit (sources) pathType pathIsDirectory cleanSourceFilter
cleanSource sourceByRegex sourceFilesBySuffices
commitIdFromGitRepo cleanSourceWith pathHasContext
canCleanSource;
inherit (modules) evalModules closeModules unifyModuleSyntax
applyIfFunction unpackSubmodule packSubmodule mergeModules
mergeModules' mergeOptionDecls evalOptionValue mergeDefinitions
pushDownProperties dischargeProperties filterOverrides
sortProperties fixupOptionType mkIf mkAssert mkMerge mkOverride
mkOptionDefault mkDefault mkForce mkVMOverride mkStrict
mkFixStrictness mkOrder mkBefore mkAfter mkAliasDefinitions
mkAliasAndWrapDefinitions fixMergeModules mkRemovedOptionModule
mkRenamedOptionModule mkMergedOptionModule mkChangedOptionModule
mkAliasOptionModule doRename filterModules;
inherit (options) isOption mkEnableOption mkSinkUndeclaredOptions
mergeDefaultOption mergeOneOption mergeEqualOption getValues
getFiles optionAttrSetToDocList optionAttrSetToDocList'
scrubOptionValue literalExample showOption showFiles
unknownModule mkOption;
inherit (types) isType setType defaultTypeMerge defaultFunctor
isOptionType mkOptionType;
inherit (debug) addErrorContextToAttrs traceIf traceVal traceValFn
traceXMLVal traceXMLValMarked traceSeq traceSeqN traceValSeq
traceValSeqFn traceValSeqN traceValSeqNFn traceShowVal
traceShowValMarked showVal traceCall traceCall2 traceCall3
traceValIfNot runTests testAllTrue traceCallXml attrNamesToStr;
inherit (misc) maybeEnv defaultMergeArg defaultMerge foldArgs
defaultOverridableDelayableArgs composedArgsAndFun
maybeAttrNullable maybeAttr ifEnable checkFlag getValue
checkReqs uniqList uniqListExt condConcat lazyGenericClosure
innerModifySumArgs modifySumArgs innerClosePropagation
closePropagation mapAttrsFlatten nvs setAttr setAttrMerge
mergeAttrsWithFunc mergeAttrsConcatenateValues
mergeAttrsNoOverride mergeAttrByFunc mergeAttrsByFuncDefaults
mergeAttrsByFuncDefaultsClean mergeAttrBy prepareDerivationArgs
nixType imap overridableDelayableArgs;
});
in lib
in
{ inherit trivial lists strings stringsWithDeps attrsets sources options
modules types meta debug maintainers licenses platforms systems sandbox;
}
# !!! don't include everything at top-level; perhaps only the most
# commonly used functions.
// trivial // lists // strings // stringsWithDeps // attrsets // sources
// options // types // meta // debug // misc // modules
// systems
// customisation

View File

@@ -1,12 +1,11 @@
{ lib }:
let
inherit (builtins) head tail isList isAttrs isInt attrNames;
let lib = import ./default.nix;
inherit (builtins) isFunction head tail isList isAttrs isInt attrNames;
in
with lib.lists;
with lib.attrsets;
with lib.strings;
with import ./lists.nix;
with import ./attrsets.nix;
with import ./strings.nix;
rec {
@@ -17,23 +16,23 @@ rec {
defaultMergeArg = x : y: if builtins.isAttrs y then
y
else
else
(y x);
defaultMerge = x: y: x // (defaultMergeArg x y);
foldArgs = merger: f: init: x:
let arg = (merger init (defaultMergeArg init x));
# now add the function with composed args already applied to the final attrs
base = (setAttrMerge "passthru" {} (f arg)
( z: z // rec {
function = foldArgs merger f arg;
args = (lib.attrByPath ["passthru" "args"] {} z) // x;
foldArgs = merger: f: init: x:
let arg=(merger init (defaultMergeArg init x));
# now add the function with composed args already applied to the final attrs
base = (setAttrMerge "passthru" {} (f arg)
( z : z // rec {
function = foldArgs merger f arg;
args = (lib.attrByPath ["passthru" "args"] {} z) // x;
} ));
withStdOverrides = base // {
override = base.passthru.function;
};
withStdOverrides = base // {
override = base.passthru.function;
} ;
in
withStdOverrides;
withStdOverrides;
# predecessors: proposed replacement for applyAndFun (which has a bug cause it merges twice)
# the naming "overridableDelayableArgs" tries to express that you can
@@ -50,35 +49,35 @@ rec {
#
# examples: see test cases "res" below;
overridableDelayableArgs =
f: # the function applied to the arguments
initial: # you pass attrs, the functions below are passing a function taking the fix argument
f : # the function applied to the arguments
initial : # you pass attrs, the functions below are passing a function taking the fix argument
let
takeFixed = if lib.isFunction initial then initial else (fixed : initial); # transform initial to an expression always taking the fixed argument
tidy = args:
takeFixed = if isFunction initial then initial else (fixed : initial); # transform initial to an expression always taking the fixed argument
tidy = args :
let # apply all functions given in "applyPreTidy" in sequence
applyPreTidyFun = fold ( n: a: x: n ( a x ) ) lib.id (maybeAttr "applyPreTidy" [] args);
applyPreTidyFun = fold ( n : a : x : n ( a x ) ) lib.id (maybeAttr "applyPreTidy" [] args);
in removeAttrs (applyPreTidyFun args) ( ["applyPreTidy"] ++ (maybeAttr "removeAttrs" [] args) ); # tidy up args before applying them
fun = n: x:
let newArgs = fixed:
let args = takeFixed fixed;
mergeFun = args.${n};
in if isAttrs x then (mergeFun args x)
else assert lib.isFunction x;
mergeFun args (x ( args // { inherit fixed; }));
in overridableDelayableArgs f newArgs;
fun = n : x :
let newArgs = fixed :
let args = takeFixed fixed;
mergeFun = args.${n};
in if isAttrs x then (mergeFun args x)
else assert isFunction x;
mergeFun args (x ( args // { inherit fixed; }));
in overridableDelayableArgs f newArgs;
in
(f (tidy (lib.fix takeFixed))) // {
merge = fun "mergeFun";
replace = fun "keepFun";
};
defaultOverridableDelayableArgs = f:
defaultOverridableDelayableArgs = f :
let defaults = {
mergeFun = mergeAttrByFunc; # default merge function. merge strategie (concatenate lists, strings) is given by mergeAttrBy
keepFun = a: b: { inherit (a) removeAttrs mergeFun keepFun mergeAttrBy; } // b; # even when using replace preserve these values
keepFun = a : b : { inherit (a) removeAttrs mergeFun keepFun mergeAttrBy; } // b; # even when using replace preserve these values
applyPreTidy = []; # list of functions applied to args before args are tidied up (usage case : prepareDerivationArgs)
mergeAttrBy = mergeAttrBy // {
applyPreTidy = a: b: a ++ b;
removeAttrs = a: b: a ++ b;
applyPreTidy = a : b : a ++ b;
removeAttrs = a : b: a ++ b;
};
removeAttrs = ["mergeFun" "keepFun" "mergeAttrBy" "removeAttrs" "fixed" ]; # before applying the arguments to the function make sure these names are gone
};
@@ -87,7 +86,7 @@ rec {
# rec { # an example of how composedArgsAndFun can be used
# a = composedArgsAndFun (x: x) { a = ["2"]; meta = { d = "bar";}; };
# a = composedArgsAndFun (x : x) { a = ["2"]; meta = { d = "bar";}; };
# # meta.d will be lost ! It's your task to preserve it (eg using a merge function)
# b = a.passthru.function { a = [ "3" ]; meta = { d2 = "bar2";}; };
# # instead of passing/ overriding values you can use a merge function:
@@ -120,7 +119,7 @@ rec {
else if val == true || val == false then false
else null;
# Return true only if there is an attribute and it is true.
checkFlag = attrSet: name:
if name == "true" then true else
@@ -135,29 +134,29 @@ rec {
( attrByPath [name] (if checkFlag attrSet name then true else
if argList == [] then null else
let x = builtins.head argList; in
if (head x) == name then
if (head x) == name then
(head (tail x))
else (getValue attrSet
else (getValue attrSet
(tail argList) name)) attrSet );
# Input : attrSet, [[name default] ...], [ [flagname reqs..] ... ]
# Output : are reqs satisfied? It's asserted.
checkReqs = attrSet: argList: condList:
checkReqs = attrSet : argList : condList :
(
fold lib.and true
(map (x: let name = (head x); in
((checkFlag attrSet name) ->
fold lib.and true
(map (x: let name = (head x) ; in
((checkFlag attrSet name) ->
(fold lib.and true
(map (y: let val=(getValue attrSet argList y); in
(val!=null) && (val!=false))
(tail x))))) condList));
(val!=null) && (val!=false))
(tail x))))) condList)) ;
# This function has O(n^2) performance.
uniqList = { inputList, acc ? [] }:
let go = xs: acc:
uniqList = {inputList, acc ? []} :
let go = xs : acc :
if xs == []
then []
else let x = head xs;
@@ -165,26 +164,26 @@ rec {
in y ++ go (tail xs) (y ++ acc);
in go inputList acc;
uniqListExt = { inputList,
outputList ? [],
getter ? (x: x),
compare ? (x: y: x==y) }:
uniqListExt = {inputList, outputList ? [],
getter ? (x : x), compare ? (x: y: x==y)}:
if inputList == [] then outputList else
let x = head inputList;
isX = y: (compare (getter y) (getter x));
newOutputList = outputList ++
(if any isX outputList then [] else [x]);
in uniqListExt { outputList = newOutputList;
inputList = (tail inputList);
inherit getter compare;
};
let x=head inputList;
isX = y: (compare (getter y) (getter x));
newOutputList = outputList ++
(if any isX outputList then [] else [x]);
in uniqListExt {outputList=newOutputList;
inputList = (tail inputList);
inherit getter compare;
};
condConcat = name: list: checker:
if list == [] then name else
if checker (head list) then
condConcat
(name + (head (tail list)))
(tail (tail list))
if checker (head list) then
condConcat
(name + (head (tail list)))
(tail (tail list))
checker
else condConcat
name (tail (tail list)) checker;
@@ -203,12 +202,12 @@ rec {
in
work startSet [] [];
innerModifySumArgs = f: x: a: b: if b == null then (f a b) // x else
innerModifySumArgs = f: x: a: b: if b == null then (f a b) // x else
innerModifySumArgs f x (a // b);
modifySumArgs = f: x: innerModifySumArgs f x {};
innerClosePropagation = acc: xs:
innerClosePropagation = acc : xs :
if xs == []
then acc
else let y = head xs;
@@ -228,45 +227,45 @@ rec {
closePropagation = list: (uniqList {inputList = (innerClosePropagation [] list);});
# calls a function (f attr value ) for each record item. returns a list
mapAttrsFlatten = f: r: map (attr: f attr r.${attr}) (attrNames r);
mapAttrsFlatten = f : r : map (attr: f attr r.${attr}) (attrNames r);
# attribute set containing one attribute
nvs = name: value: listToAttrs [ (nameValuePair name value) ];
nvs = name : value : listToAttrs [ (nameValuePair name value) ];
# adds / replaces an attribute of an attribute set
setAttr = set: name: v: set // (nvs name v);
setAttr = set : name : v : set // (nvs name v);
# setAttrMerge (similar to mergeAttrsWithFunc but only merges the values of a particular name)
# setAttrMerge "a" [] { a = [2];} (x: x ++ [3]) -> { a = [2 3]; }
# setAttrMerge "a" [] { } (x: x ++ [3]) -> { a = [ 3]; }
setAttrMerge = name: default: attrs: f:
# setAttrMerge "a" [] { a = [2];} (x : x ++ [3]) -> { a = [2 3]; }
# setAttrMerge "a" [] { } (x : x ++ [3]) -> { a = [ 3]; }
setAttrMerge = name : default : attrs : f :
setAttr attrs name (f (maybeAttr name default attrs));
# Using f = a: b = b the result is similar to //
# Using f = a : b = b the result is similar to //
# merge attributes with custom function handling the case that the attribute
# exists in both sets
mergeAttrsWithFunc = f: set1: set2:
fold (n: set: if set ? ${n}
mergeAttrsWithFunc = f : set1 : set2 :
fold (n: set : if set ? ${n}
then setAttr set n (f set.${n} set2.${n})
else set )
(set2 // set1) (attrNames set2);
# merging two attribute set concatenating the values of same attribute names
# eg { a = 7; } { a = [ 2 3 ]; } becomes { a = [ 7 2 3 ]; }
mergeAttrsConcatenateValues = mergeAttrsWithFunc ( a: b: (toList a) ++ (toList b) );
mergeAttrsConcatenateValues = mergeAttrsWithFunc ( a : b : (toList a) ++ (toList b) );
# merges attributes using //, if a name exists in both attributes
# merges attributes using //, if a name exisits in both attributes
# an error will be triggered unless its listed in mergeLists
# so you can mergeAttrsNoOverride { buildInputs = [a]; } { buildInputs = [a]; } {} to get
# { buildInputs = [a b]; }
# merging buildPhase doesn't really make sense. The cases will be rare where appending /prefixing will fit your needs?
# merging buildPhase does'nt really make sense. The cases will be rare where appending /prefixing will fit your needs?
# in these cases the first buildPhase will override the second one
# ! deprecated, use mergeAttrByFunc instead
mergeAttrsNoOverride = { mergeLists ? ["buildInputs" "propagatedBuildInputs"],
overrideSnd ? [ "buildPhase" ]
}: attrs1: attrs2:
fold (n: set:
} : attrs1 : attrs2 :
fold (n: set :
setAttr set n ( if set ? ${n}
then # merge
then # merge
if elem n mergeLists # attribute contains list, merge them by concatenating
then attrs2.${n} ++ attrs1.${n}
else if elem n overrideSnd
@@ -287,14 +286,14 @@ rec {
# { mergeAttrsBy = [...]; buildInputs = [ a b c d ]; }
# is used by prepareDerivationArgs, defaultOverridableDelayableArgs and can be used when composing using
# foldArgs, composedArgsAndFun or applyAndFun. Example: composableDerivation in all-packages.nix
mergeAttrByFunc = x: y:
mergeAttrByFunc = x : y :
let
mergeAttrBy2 = { mergeAttrBy = lib.mergeAttrs; }
mergeAttrBy2 = { mergeAttrBy=lib.mergeAttrs; }
// (maybeAttr "mergeAttrBy" {} x)
// (maybeAttr "mergeAttrBy" {} y); in
fold lib.mergeAttrs {} [
x y
(mapAttrs ( a: v: # merge special names using given functions
(mapAttrs ( a : v : # merge special names using given functions
if x ? ${a}
then if y ? ${a}
then v x.${a} y.${a} # both have attr, use merge func
@@ -310,19 +309,58 @@ rec {
mergeAttrsByFuncDefaults = foldl mergeAttrByFunc { inherit mergeAttrBy; };
mergeAttrsByFuncDefaultsClean = list: removeAttrs (mergeAttrsByFuncDefaults list) ["mergeAttrBy"];
# merge attrs based on version key into mkDerivation args, see mergeAttrBy to learn about smart merge defaults
#
# This function is best explained by an example:
#
# {version ? "2.x"} :
#
# mkDerivation (mergeAttrsByVersion "package-name" version
# { # version specific settings
# "git" = { src = ..; preConfigre = "autogen.sh"; buildInputs = [automake autoconf libtool]; };
# "2.x" = { src = ..; };
# }
# { // shared settings
# buildInputs = [ common build inputs ];
# meta = { .. }
# }
# )
#
# Please note that e.g. Eelco Dolstra usually prefers having one file for
# each version. On the other hand there are valuable additional design goals
# - readability
# - do it once only
# - try to avoid duplication
#
# Marc Weber and Michael Raskin sometimes prefer keeping older
# versions around for testing and regression tests - as long as it's cheap to
# do so.
#
# Very often it just happens that the "shared" code is the bigger part.
# Then using this function might be appropriate.
#
# Be aware that it's easy to cause recompilations in all versions when using
# this function. Also, if derivations get too complex, splitting them into
# multiple files is the way to go.
#
# See misc.nix -> versionedDerivation
# discussion: nixpkgs: pull/310
mergeAttrsByVersion = name: version: attrsByVersion: base:
mergeAttrsByFuncDefaultsClean [ { name = "${name}-${version}"; } base (maybeAttr version (throw "bad version ${version} for ${name}") attrsByVersion)];
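# A minimal worked sketch (illustrative only; the package name and source
# path are hypothetical):
#   mergeAttrsByVersion "hello" "1.0"
#     { "1.0" = { src = ./hello-1.0.tar.gz; }; }
#     { buildInputs = [ ]; }
#   => { name = "hello-1.0"; src = ./hello-1.0.tar.gz; buildInputs = [ ]; }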
# sane defaults (same name as attr name so that inherit can be used)
mergeAttrBy = # { buildInputs = concatList; [...]; passthru = mergeAttr; [..]; }
listToAttrs (map (n: nameValuePair n lib.concat)
listToAttrs (map (n : nameValuePair n lib.concat)
[ "nativeBuildInputs" "buildInputs" "propagatedBuildInputs" "configureFlags" "prePhases" "postAll" "patches" ])
// listToAttrs (map (n: nameValuePair n lib.mergeAttrs) [ "passthru" "meta" "cfg" "flags" ])
// listToAttrs (map (n: nameValuePair n (a: b: "${a}\n${b}") ) [ "preConfigure" "postInstall" ])
// listToAttrs (map (n : nameValuePair n lib.mergeAttrs) [ "passthru" "meta" "cfg" "flags" ])
// listToAttrs (map (n : nameValuePair n (a: b: "${a}\n${b}") ) [ "preConfigure" "postInstall" ])
;
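# Illustrative sketch (not part of the original file) of how these defaults
# behave through mergeAttrsByFuncDefaultsClean; `a` and `b` stand for
# arbitrary derivations:
#   mergeAttrsByFuncDefaultsClean [
#     { buildInputs = [ a ]; preConfigure = "echo one"; }
#     { buildInputs = [ b ]; preConfigure = "echo two"; }
#   ]
#   => { buildInputs = [ a b ]; preConfigure = "echo one\necho two"; }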
# prepareDerivationArgs tries to make writing configurable derivations easier
# example:
# prepareDerivationArgs {
# mergeAttrBy = {
# myScript = x: y: x ++ "\n" ++ y;
# myScript = x : y : x ++ "\n" ++ y;
# };
# cfg = {
# readlineSupport = true;
@@ -354,10 +392,10 @@ rec {
# TODO use args.mergeFun here as well?
prepareDerivationArgs = args:
let args2 = { cfg = {}; flags = {}; } // args;
flagName = name: "${name}Support";
cfgWithDefaults = (listToAttrs (map (n: nameValuePair (flagName n) false) (attrNames args2.flags)))
flagName = name : "${name}Support";
cfgWithDefaults = (listToAttrs (map (n : nameValuePair (flagName n) false) (attrNames args2.flags)))
// args2.cfg;
opts = attrValues (mapAttrs (a: v:
opts = attrValues (mapAttrs (a : v :
let v2 = if v ? set || v ? unset then v else { set = v; };
n = if cfgWithDefaults.${flagName a} then "set" else "unset";
attr = maybeAttr n {} v2; in
@@ -374,7 +412,7 @@ rec {
if isAttrs x then
if x ? outPath then "derivation"
else "attrs"
else if lib.isFunction x then "function"
else if isFunction x then "function"
else if isList x then "list"
else if x == true then "bool"
else if x == false then "bool"
@@ -382,12 +420,4 @@ rec {
else if isInt x then "int"
else "string";
/* deprecated:
For historical reasons, imap has an index starting at 1.
But for consistency with the rest of the library we want an index
starting at zero.
*/
imap = imap1;
}


@@ -1,13 +0,0 @@
# snippets that can be shared by multiple fetchers (pkgs/build-support)
{ lib }:
{
proxyImpureEnvVars = [
# We borrow these environment variables from the caller to allow
# easy proxy configuration. This is impure, but a fixed-output
# derivation like fetchurl is allowed to do so since its result is
# by definition pure.
"http_proxy" "https_proxy" "ftp_proxy" "all_proxy" "no_proxy"
];
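# Illustrative sketch (not part of the original file): a fixed-output
# fetcher can forward these variables through the derivation's
# `impureEnvVars` attribute, roughly like
#   stdenv.mkDerivation {
#     name = "example-fetch";                          # hypothetical
#     impureEnvVars = lib.fetchers.proxyImpureEnvVars;
#     outputHashAlgo = "sha256";
#     outputHash = "...";                              # fixed-output hash elided
#     # ...
#   }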
}


@@ -1,45 +0,0 @@
{ lib }:
{ # haskellPathsInDir : Path -> Map String Path
# A map of all haskell packages defined in the given path,
# identified by having a cabal file with the same name as the
# directory itself.
haskellPathsInDir = root:
let # Files in the root
root-files = builtins.attrNames (builtins.readDir root);
# Files with their full paths
root-files-with-paths =
map (file:
{ name = file; value = root + "/${file}"; }
) root-files;
# Subdirectories of the root with a cabal file.
cabal-subdirs =
builtins.filter ({ name, value }:
builtins.pathExists (value + "/${name}.cabal")
) root-files-with-paths;
in builtins.listToAttrs cabal-subdirs;
# locateDominatingFile : RegExp
# -> Path
# -> Nullable { path : Path;
# matches : [ MatchResults ];
# }
# Find the first directory containing a file matching 'pattern'
# upward from a given 'file'.
# Returns 'null' if no directories contain a file matching 'pattern'.
locateDominatingFile = pattern: file:
let go = path:
let files = builtins.attrNames (builtins.readDir path);
matches = builtins.filter (match: match != null)
(map (builtins.match pattern) files);
in
if builtins.length matches != 0
then { inherit path matches; }
else if path == /.
then null
else go (dirOf path);
parent = dirOf file;
isDir =
let base = baseNameOf file;
type = (builtins.readDir parent).${base} or null;
in file == /. || type == "directory";
in go (if isDir then file else parent);
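# Illustrative sketch (not part of the original file), assuming a checkout
# containing ./pkgs/foo/foo.cabal:
#   haskellPathsInDir ./pkgs
#   => { foo = ./pkgs/foo; }
#   locateDominatingFile ".*\\.cabal" ./pkgs/foo/src
#   => { path = ./pkgs/foo; matches = [ ... ]; }   # match results elided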
}


@@ -1,79 +0,0 @@
{ ... }:
rec {
# Compute the fixed point of the given function `f`, which is usually an
# attribute set that expects its final, non-recursive representation as an
# argument:
#
# f = self: { foo = "foo"; bar = "bar"; foobar = self.foo + self.bar; }
#
# Nix evaluates this recursion until all references to `self` have been
# resolved. At that point, the final result is returned and `f x = x` holds:
#
# nix-repl> fix f
# { bar = "bar"; foo = "foo"; foobar = "foobar"; }
#
# Type: fix :: (a -> a) -> a
#
# See https://en.wikipedia.org/wiki/Fixed-point_combinator for further
# details.
fix = f: let x = f x; in x;
# A variant of `fix` that records the original recursive attribute set in the
# result. This is useful in combination with the `extends` function to
# implement deep overriding. See pkgs/development/haskell-modules/default.nix
# for a concrete example.
fix' = f: let x = f x // { __unfix__ = f; }; in x;
# Modify the contents of an explicitly recursive attribute set in a way that
# honors `self`-references. This is accomplished with a function
#
# g = self: super: { foo = super.foo + " + "; }
#
# that has access to the unmodified input (`super`) as well as the final
# non-recursive representation of the attribute set (`self`). `extends`
# differs from the native `//` operator insofar as that it's applied *before*
# references to `self` are resolved:
#
# nix-repl> fix (extends g f)
# { bar = "bar"; foo = "foo + "; foobar = "foo + bar"; }
#
# The name of the function is inspired by object-oriented inheritance, i.e.
# think of it as an infix operator `g extends f` that mimics the syntax from
# Java. It may seem counter-intuitive to have the "base class" as the second
# argument, but it's nice this way if several uses of `extends` are cascaded.
extends = f: rattrs: self: let super = rattrs self; in super // f self super;
# Compose two extending functions of the type expected by 'extends'
# into one where changes made in the first are available in the
# 'super' of the second
composeExtensions =
f: g: self: super:
let fApplied = f self super;
super' = super // fApplied;
in fApplied // g self super';
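# For example (an illustrative sketch, not part of the original file):
#
#   ext1 = self: super: { foo = "foo"; }
#   ext2 = self: super: { bar = super.foo + "bar"; }
#
#   nix-repl> fix (extends (composeExtensions ext1 ext2) (self: { }))
#   { bar = "foobar"; foo = "foo"; }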
# Create an overridable, recursive attribute set. For example:
#
# nix-repl> obj = makeExtensible (self: { })
#
# nix-repl> obj
# { __unfix__ = «lambda»; extend = «lambda»; }
#
# nix-repl> obj = obj.extend (self: super: { foo = "foo"; })
#
# nix-repl> obj
# { __unfix__ = «lambda»; extend = «lambda»; foo = "foo"; }
#
# nix-repl> obj = obj.extend (self: super: { foo = super.foo + " + "; bar = "bar"; foobar = self.foo + self.bar; })
#
# nix-repl> obj
# { __unfix__ = «lambda»; bar = "bar"; extend = «lambda»; foo = "foo + "; foobar = "foo + bar"; }
makeExtensible = makeExtensibleWithCustomName "extend";
# Same as `makeExtensible` but the name of the extending attribute is
# customized.
makeExtensibleWithCustomName = extenderName: rattrs:
fix' rattrs // {
${extenderName} = f: makeExtensibleWithCustomName extenderName (extends f rattrs);
};
}


@@ -1,226 +0,0 @@
/* Functions that generate widespread file
* formats from nix data structures.
*
* They all follow a similar interface:
* generator { config-attrs } data
*
* `config-attrs` are holes in the generators
* with sensible default implementations that
* can be overwritten. The default implementations
* are mostly generators themselves, called with
* their respective default values; they can be reused.
*
* Tests can be found in ./tests.nix
* Documentation in the manual, #sec-generators
*/
{ lib }:
with (lib).trivial;
let
libStr = lib.strings;
libAttr = lib.attrsets;
inherit (lib) isFunction;
in
rec {
## -- HELPER FUNCTIONS & DEFAULTS --
/* Convert a value to a sensible default string representation.
* The builtin `toString` function has some strange defaults,
* suitable for bash scripts but not much else.
*/
mkValueStringDefault = {}: v: with builtins;
let err = t: v: abort
("generators.mkValueStringDefault: " +
"${t} not supported: ${toPretty {} v}");
in if isInt v then toString v
# we default to not quoting strings
else if isString v then v
# toString true yields "1", which is not a good default
else if true == v then "true"
# toString false yields "", which is even less of a good default
else if false == v then "false"
else if null == v then "null"
# if you have lists you probably want to replace this
else if isList v then err "lists" v
# same as for lists, might want to replace
else if isAttrs v then err "attrsets" v
else if isFunction v then err "functions" v
else err "this value is" (toString v);
/* Generate a line of key k and value v, separated by
* character sep. If sep appears in k, it is escaped.
* Helper for syntaxes with different separators.
*
* mkValueString specifies how values should be formatted.
*
* mkKeyValueDefault {} ":" "f:oo" "bar"
* > "f\:oo:bar"
*/
mkKeyValueDefault = {
mkValueString ? mkValueStringDefault {}
}: sep: k: v:
"${libStr.escape [sep] k}${sep}${mkValueString v}";
## -- FILE FORMAT GENERATORS --
/* Generate a key-value-style config file from an attrset.
*
* mkKeyValue is the same as in toINI.
*/
toKeyValue = {
mkKeyValue ? mkKeyValueDefault {} "="
}: attrs:
let mkLine = k: v: mkKeyValue k v + "\n";
in libStr.concatStrings (libAttr.mapAttrsToList mkLine attrs);
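/* Illustrative sketch (not part of the original file):
 *
 *   toKeyValue {} { foo = "1"; bar = "2"; }
 *   => "bar=2\nfoo=1\n"
 */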
/* Generate an INI-style config file from an
* attrset of sections to an attrset of key-value pairs.
*
* generators.toINI {} {
* foo = { hi = "${pkgs.hello}"; ciao = "bar"; };
* baz = { "also, integers" = 42; };
* }
*
*> [baz]
*> also, integers=42
*>
*> [foo]
*> ciao=bar
*> hi=/nix/store/y93qql1p5ggfnaqjjqhxcw0vqw95rlz0-hello-2.10
*
* The mk* configuration attributes can generically change
* the way sections and key-value strings are generated.
*
* For more examples see the test cases in ./tests.nix.
*/
toINI = {
# apply transformations (e.g. escapes) to section names
mkSectionName ? (name: libStr.escape [ "[" "]" ] name),
# format a setting line from key and value
mkKeyValue ? mkKeyValueDefault {} "="
}: attrsOfAttrs:
let
# map function to string for each key val
mapAttrsToStringsSep = sep: mapFn: attrs:
libStr.concatStringsSep sep
(libAttr.mapAttrsToList mapFn attrs);
mkSection = sectName: sectValues: ''
[${mkSectionName sectName}]
'' + toKeyValue { inherit mkKeyValue; } sectValues;
in
# map input to ini sections
mapAttrsToStringsSep "\n" mkSection attrsOfAttrs;
/* Generates JSON from an arbitrary (non-function) value.
* For more information see the documentation of the builtin.
*/
toJSON = {}: builtins.toJSON;
/* YAML has been a strict superset of JSON since 1.2, so we
* use toJSON. Before it only had a few differences referring
* to implicit typing rules, so it should work with older
* parsers as well.
*/
toYAML = {}@args: toJSON args;
/* Pretty print a value, akin to `builtins.trace`.
* Should probably be a builtin as well.
*/
toPretty = {
/* If this option is true, attrsets like { __pretty = fn; val = …; }
will use fn to convert val to a pretty printed representation.
(This means fn is type Val -> String.) */
allowPrettyValues ? false
}@args: v: with builtins;
let isPath = v: typeOf v == "path";
in if isInt v then toString v
else if isString v then ''"${libStr.escape [''"''] v}"''
else if true == v then "true"
else if false == v then "false"
else if null == v then "null"
else if isPath v then toString v
else if isList v then "[ "
+ libStr.concatMapStringsSep " " (toPretty args) v
+ " ]"
else if isAttrs v then
# apply pretty values if allowed
if attrNames v == [ "__pretty" "val" ] && allowPrettyValues
then v.__pretty v.val
# TODO: there is probably a better representation?
else if v ? type && v.type == "derivation" then
"<δ:${v.name}>"
# "<δ:${concatStringsSep "," (builtins.attrNames v)}>"
else "{ "
+ libStr.concatStringsSep " " (libAttr.mapAttrsToList
(name: value:
"${toPretty args name} = ${toPretty args value};") v)
+ " }"
else if isFunction v then
let fna = lib.functionArgs v;
showFnas = concatStringsSep "," (libAttr.mapAttrsToList
(name: hasDefVal: if hasDefVal then "(${name})" else name)
fna);
in if fna == {} then "<λ>"
else "<λ:{${showFnas}}>"
else abort "generators.toPretty: should never happen (v = ${v})";
# PLIST handling
toPlist = {}: v: let
isFloat = builtins.isFloat or (x: false);
expr = ind: x: with builtins;
if isNull x then "" else
if isBool x then bool ind x else
if isInt x then int ind x else
if isString x then str ind x else
if isList x then list ind x else
if isAttrs x then attrs ind x else
if isFloat x then float ind x else
abort "generators.toPlist: should never happen (v = ${v})";
literal = ind: x: ind + x;
bool = ind: x: literal ind (if x then "<true/>" else "<false/>");
int = ind: x: literal ind "<integer>${toString x}</integer>";
str = ind: x: literal ind "<string>${x}</string>";
key = ind: x: literal ind "<key>${x}</key>";
float = ind: x: literal ind "<real>${toString x}</real>";
indent = ind: expr "\t${ind}";
item = ind: libStr.concatMapStringsSep "\n" (indent ind);
list = ind: x: libStr.concatStringsSep "\n" [
(literal ind "<array>")
(item ind x)
(literal ind "</array>")
];
attrs = ind: x: libStr.concatStringsSep "\n" [
(literal ind "<dict>")
(attr ind x)
(literal ind "</dict>")
];
attr = let attrFilter = name: value: name != "_module" && value != null;
in ind: x: libStr.concatStringsSep "\n" (lib.flatten (lib.mapAttrsToList
(name: value: lib.optional (attrFilter name value) [
(key "\t${ind}" name)
(expr "\t${ind}" value)
]) x));
in ''<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple Computer//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
${expr "" v}
</plist>'';
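# Illustrative sketch (not part of the original file): toPlist {} { x = 1; }
# yields the XML prologue above followed by
#   <dict>
#     <key>x</key>
#     <integer>1</integer>
#   </dict>
# wrapped in <plist version="1.0"> ... </plist>.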
}


@@ -1,57 +0,0 @@
{ lib
# we pass the kernel version here to keep a nice syntax `whenOlder "4.13"`
# kernelVersion, e.g., config.boot.kernelPackages.version
, version
, mkValuePreprocess ? null
}:
with lib;
rec {
# Common patterns
when = cond: opt: if cond then opt else null;
whenAtLeast = ver: when (versionAtLeast version ver);
whenOlder = ver: when (versionOlder version ver);
whenBetween = verLow: verHigh: when (versionAtLeast version verLow && versionOlder version verHigh);
# Keeping these around in case we decide to change this horrible implementation :)
option = x: if x == null then null else "?${x}";
yes = "y";
no = "n";
module = "m";
mkValue = val:
let
isNumber = c: elem c ["0" "1" "2" "3" "4" "5" "6" "7" "8" "9"];
in
if val == "" then "\"\""
else if val == yes || val == module || val == no then val
else if all isNumber (stringToCharacters val) then val
else if substring 0 2 val == "0x" then val
else val; # FIXME: fix quoting one day
# generate nix intermediate kernel config file of the form
#
# VIRTIO_MMIO m
# VIRTIO_BLK y
# VIRTIO_CONSOLE n
# NET_9P_VIRTIO? y
#
# Use mkValuePreprocess to preprocess option values, aka mark 'modules' as
# 'yes' or vice-versa
# Borrowed from copumpkin https://github.com/NixOS/nixpkgs/pull/12158
# returns a string; `exprs` should be an attribute set
generateNixKConf = exprs: mkValuePreprocess:
let
mkConfigLine = key: rawval:
let
val = if builtins.isFunction mkValuePreprocess then mkValuePreprocess rawval else rawval;
in
if val == null
then ""
else if hasPrefix "?" val
then "${key}? ${mkValue (removePrefix "?" val)}\n"
else "${key} ${mkValue val}\n";
mkConf = cfg: concatStrings (mapAttrsToList mkConfigLine cfg);
in mkConf exprs;
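# Illustrative sketch (not part of the original file):
#   generateNixKConf { VIRTIO_BLK = "y"; NET_9P_VIRTIO = "?y"; } null
#   => "NET_9P_VIRTIO? y\nVIRTIO_BLK y\n"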
}


@@ -1,8 +1,9 @@
{ lib }:
let
lib = import ./default.nix;
spdx = lic: lic // {
url = "http://spdx.org/licenses/${lic.spdxId}.html";
url = "http://spdx.org/licenses/${lic.spdxId}";
};
in
@@ -15,12 +16,7 @@ lib.mapAttrs (n: v: v // { shortName = n; }) rec {
afl21 = spdx {
spdxId = "AFL-2.1";
fullName = "Academic Free License v2.1";
};
afl3 = spdx {
spdxId = "AFL-3.0";
fullName = "Academic Free License v3.0";
fullName = "Academic Free License";
};
agpl3 = spdx {
@@ -42,7 +38,6 @@ lib.mapAttrs (n: v: v // { shortName = n; }) rec {
amd = {
fullName = "AMD License Agreement";
url = http://developer.amd.com/amd-license-agreement/;
free = false;
};
apsl20 = spdx {
@@ -50,11 +45,6 @@ lib.mapAttrs (n: v: v // { shortName = n; }) rec {
fullName = "Apple Public Source License 2.0";
};
arphicpl = {
fullName = "Arphic Public License";
url = https://www.freedesktop.org/wiki/Arphic_Public_License/;
};
artistic1 = spdx {
spdxId = "Artistic-1.0";
fullName = "Artistic License 1.0";
@@ -75,16 +65,6 @@ lib.mapAttrs (n: v: v // { shortName = n; }) rec {
fullName = "Boost Software License 1.0";
};
beerware = spdx {
spdxId = "Beerware";
fullName = ''Beerware License'';
};
bsd0 = spdx {
spdxId = "0BSD";
fullName = "BSD Zero Clause License";
};
bsd2 = spdx {
spdxId = "BSD-2-Clause";
fullName = ''BSD 2-clause "Simplified" License'';
@@ -100,17 +80,6 @@ lib.mapAttrs (n: v: v // { shortName = n; }) rec {
fullName = ''BSD 4-clause "Original" or "Old" License'';
};
bsl11 = {
fullName = "Business Source License 1.1";
url = https://mariadb.com/bsl11;
free = false;
};
clArtistic = spdx {
spdxId = "ClArtistic";
fullName = "Clarified Artistic License";
};
cc0 = spdx {
spdxId = "CC0-1.0";
fullName = "Creative Commons Zero v1.0 Universal";
@@ -119,37 +88,21 @@ lib.mapAttrs (n: v: v // { shortName = n; }) rec {
cc-by-nc-sa-20 = spdx {
spdxId = "CC-BY-NC-SA-2.0";
fullName = "Creative Commons Attribution Non Commercial Share Alike 2.0";
free = false;
};
cc-by-nc-sa-25 = spdx {
spdxId = "CC-BY-NC-SA-2.5";
fullName = "Creative Commons Attribution Non Commercial Share Alike 2.5";
free = false;
};
cc-by-nc-sa-30 = spdx {
spdxId = "CC-BY-NC-SA-3.0";
fullName = "Creative Commons Attribution Non Commercial Share Alike 3.0";
free = false;
};
cc-by-nc-sa-40 = spdx {
spdxId = "CC-BY-NC-SA-4.0";
fullName = "Creative Commons Attribution Non Commercial Share Alike 4.0";
free = false;
};
cc-by-nc-40 = spdx {
spdxId = "CC-BY-NC-4.0";
fullName = "Creative Commons Attribution Non Commercial 4.0 International";
free = false;
};
cc-by-nd-30 = spdx {
spdxId = "CC-BY-ND-3.0";
fullName = "Creative Commons Attribution-No Derivative Works v3.00";
free = false;
};
cc-by-sa-25 = spdx {
@@ -197,32 +150,16 @@ lib.mapAttrs (n: v: v // { shortName = n; }) rec {
fullName = "CeCILL-C Free Software License Agreement";
};
cpal10 = spdx {
spdxId = "CPAL-1.0";
fullName = "Common Public Attribution License 1.0";
};
cpl10 = spdx {
spdxId = "CPL-1.0";
fullName = "Common Public License 1.0";
};
curl = {
fullName = "MIT/X11 derivate";
url = "https://curl.haxx.se/docs/copyright.html";
};
doc = spdx {
spdxId = "DOC";
fullName = "DOC License";
};
eapl = {
fullName = "EPSON AVASYS PUBLIC LICENSE";
url = http://avasys.jp/hp/menu000000700/hpg000000603.htm;
free = false;
};
efl10 = spdx {
spdxId = "EFL-1.0";
fullName = "Eiffel Forum License v1.0";
@@ -233,33 +170,11 @@ lib.mapAttrs (n: v: v // { shortName = n; }) rec {
fullName = "Eiffel Forum License v2.0";
};
elastic = {
fullName = "ELASTIC LICENSE";
url = https://github.com/elastic/elasticsearch/blob/master/licenses/ELASTIC-LICENSE.txt;
free = false;
};
epl10 = spdx {
spdxId = "EPL-1.0";
fullName = "Eclipse Public License 1.0";
};
epl20 = spdx {
spdxId = "EPL-2.0";
fullName = "Eclipse Public License 2.0";
};
epson = {
fullName = "Seiko Epson Corporation Software License Agreement for Linux";
url = https://download.ebz.epson.net/dsc/du/02/eula/global/LINUX_EN.html;
free = false;
};
eupl11 = spdx {
spdxId = "EUPL-1.1";
fullName = "European Union Public License 1.1";
};
fdl12 = spdx {
spdxId = "GFDL-1.2";
fullName = "GNU Free Documentation License v1.2";
@@ -267,30 +182,13 @@ lib.mapAttrs (n: v: v // { shortName = n; }) rec {
fdl13 = spdx {
spdxId = "GFDL-1.3";
fullName = "GNU Free Documentation License v1.3";
};
ffsl = {
fullName = "Floodgap Free Software License";
url = http://www.floodgap.com/software/ffsl/license.html;
free = false;
fullName = "GNU Free Documentation License v1.2";
};
free = {
fullName = "Unspecified free software license";
};
g4sl = {
fullName = "Geant4 Software License";
url = https://geant4.web.cern.ch/geant4/license/LICENSE.html;
};
geogebra = {
fullName = "GeoGebra Non-Commercial License Agreement";
url = https://www.geogebra.org/license;
free = false;
};
gpl1 = spdx {
spdxId = "GPL-1.0";
fullName = "GNU General Public License v1.0 only";
@@ -306,12 +204,6 @@ lib.mapAttrs (n: v: v // { shortName = n; }) rec {
fullName = "GNU General Public License v2.0 only";
};
gpl2Classpath = {
spdxId = "GPL-2.0-with-classpath-exception";
fullName = "GNU General Public License v2.0 only (with Classpath exception)";
url = https://fedoraproject.org/wiki/Licensing/GPL_Classpath_Exception;
};
gpl2ClasspathPlus = {
fullName = "GNU General Public License v2.0 or later (with Classpath exception)";
url = https://fedoraproject.org/wiki/Licensing/GPL_Classpath_Exception;
@@ -319,7 +211,7 @@ lib.mapAttrs (n: v: v // { shortName = n; }) rec {
gpl2Oss = {
fullName = "GNU General Public License version 2 only (with OSI approved licenses linking exception)";
url = https://www.mysql.com/about/legal/licensing/foss-exception;
url = http://www.mysql.com/about/legal/licensing/foss-exception;
};
gpl2Plus = spdx {
@@ -342,11 +234,6 @@ lib.mapAttrs (n: v: v // { shortName = n; }) rec {
url = https://fedoraproject.org/wiki/Licensing/GPL_Classpath_Exception;
};
hpnd = spdx {
spdxId = "HPND";
fullName = "Historic Permission Notice and Disclaimer";
};
# Intel's license, seems free
iasl = {
fullName = "iASL";
@@ -358,21 +245,9 @@ lib.mapAttrs (n: v: v // { shortName = n; }) rec {
fullName = "Independent JPEG Group License";
};
imagemagick = spdx {
fullName = "ImageMagick License";
spdxId = "imagemagick";
};
inria-compcert = {
fullName = "INRIA Non-Commercial License Agreement for the CompCert verified compiler";
inria = {
fullName = "INRIA Non-Commercial License Agreement";
url = "http://compcert.inria.fr/doc/LICENSE";
free = false;
};
inria-icesl = {
fullName = "INRIA Non-Commercial License Agreement for IceSL";
url = "http://shapeforge.loria.fr/icesl/EULA_IceSL_binary.pdf";
free = false;
};
ipa = spdx {
@@ -450,11 +325,6 @@ lib.mapAttrs (n: v: v // { shortName = n; }) rec {
fullName = "Lucent Public License v1.02";
};
miros = {
fullName = "MirOS License";
url = https://opensource.org/licenses/MirOS;
};
# spdx.org does not (yet) differentiate between the X11 and Expat versions
# for details see http://en.wikipedia.org/wiki/MIT_License#Various_versions
mit = spdx {
@@ -477,15 +347,9 @@ lib.mapAttrs (n: v: v // { shortName = n; }) rec {
fullName = "Mozilla Public License 2.0";
};
mspl = spdx {
spdxId = "MS-PL";
fullName = "Microsoft Public License";
};
msrla = {
fullName = "Microsoft Research License Agreement";
url = "http://research.microsoft.com/en-us/projects/pex/msr-la.txt";
free = false;
};
ncsa = spdx {
@@ -497,12 +361,7 @@ lib.mapAttrs (n: v: v // { shortName = n; }) rec {
url = "https://raw.githubusercontent.com/raboof/notion/master/LICENSE";
fullName = "Notion modified LGPL";
};
nposl3 = spdx {
spdxId = "NPOSL-3.0";
fullName = "Non-Profit Open Software License 3.0";
};
ofl = spdx {
spdxId = "OFL-1.1";
fullName = "SIL Open Font License 1.1";
@@ -518,16 +377,6 @@ lib.mapAttrs (n: v: v // { shortName = n; }) rec {
fullName = "OpenSSL License";
};
osl21 = spdx {
spdxId = "OSL-2.1";
fullName = "Open Software License 2.1";
};
osl3 = spdx {
spdxId = "OSL-3.0";
fullName = "Open Software License 3.0";
};
php301 = spdx {
spdxId = "PHP-3.01";
fullName = "PHP License v3.01";
@@ -538,12 +387,6 @@ lib.mapAttrs (n: v: v // { shortName = n; }) rec {
fullName = "PostgreSQL License";
};
postman = {
fullName = "Postman EULA";
url = https://www.getpostman.com/licenses/postman_base_app;
free = false;
};
psfl = spdx {
spdxId = "Python-2.0";
fullName = "Python Software Foundation License version 2";
@@ -579,12 +422,6 @@ lib.mapAttrs (n: v: v // { shortName = n; }) rec {
fullName = "Sleepycat License";
};
smail = {
shortName = "smail";
fullName = "SMAIL General Public License";
url = http://metadata.ftp-master.debian.org/changelogs/main/d/debianutils/debianutils_4.8.1_copyright;
};
tcltk = spdx {
spdxId = "TCL";
fullName = "TCL/TK License";
@@ -616,32 +453,16 @@ lib.mapAttrs (n: v: v // { shortName = n; }) rec {
fullName = "The Unlicense";
};
upl = {
fullName = "Universal Permissive License";
url = "https://oss.oracle.com/licenses/upl/";
};
vim = spdx {
spdxId = "Vim";
fullName = "Vim License";
};
virtualbox-puel = {
fullName = "Oracle VM VirtualBox Extension Pack Personal Use and Evaluation License (PUEL)";
url = "https://www.virtualbox.org/wiki/VirtualBox_PUEL";
free = false;
};
vsl10 = spdx {
spdxId = "VSL-1.0";
fullName = "Vovida Software License v1.0";
};
watcom = spdx {
spdxId = "Watcom-1.0";
fullName = "Sybase Open Watcom Public License 1.0";
};
w3c = spdx {
spdxId = "W3C";
fullName = "W3C Software Notice and License";
@@ -657,28 +478,19 @@ lib.mapAttrs (n: v: v // { shortName = n; }) rec {
fullName = "Do What The F*ck You Want To Public License";
};
wxWindows = spdx {
spdxId = "WXwindows";
fullName = "wxWindows Library Licence, Version 3.1";
};
xfig = {
fullName = "xfig";
url = "http://mcj.sourceforge.net/authors.html#xfig";
};
zlib = spdx {
spdxId = "Zlib";
fullName = "zlib License";
};
zpl20 = spdx {
zpt20 = spdx { # FIXME: why zpt* instead of zpl*
spdxId = "ZPL-2.0";
fullName = "Zope Public License 2.0";
};
zpl21 = spdx {
zpt21 = spdx {
spdxId = "ZPL-2.1";
fullName = "Zope Public License 2.1";
};
}


@@ -1,42 +1,23 @@
# General list operations.
{ lib }:
with lib.trivial;
let
inherit (lib.strings) toInt;
in
with import ./trivial.nix;
rec {
inherit (builtins) head tail length isList elemAt concatLists filter elem genList;
/* Create a list consisting of a single element. `singleton x` is
sometimes more convenient with respect to indentation than `[x]`
when x spans multiple lines.
Type: singleton :: a -> [a]
Example:
singleton "foo"
=> [ "foo" ]
*/
# Create a list consisting of a single element. `singleton x' is
# sometimes more convenient with respect to indentation than `[x]'
# when x spans multiple lines.
singleton = x: [x];
/* right fold a binary function `op` between successive elements of
`list` with `nul' as the starting value, i.e.,
`foldr op nul [x_1 x_2 ... x_n] == op x_1 (op x_2 ... (op x_n nul))`.
Type: foldr :: (a -> b -> b) -> b -> [a] -> b
Example:
concat = foldr (a: b: a + b) "z"
concat [ "a" "b" "c" ]
=> "abcz"
# different types
strange = foldr (int: str: toString (int + 1) + str) "a"
strange [ 1 2 3 4 ]
=> "2345a"
*/
foldr = op: nul: list:
# "Fold" a binary function `op' between successive elements of
# `list' with `nul' as the starting value, i.e., `fold op nul [x_1
# x_2 ... x_n] == op x_1 (op x_2 ... (op x_n nul))'. (This is
# Haskell's foldr).
fold = op: nul: list:
let
len = length list;
fold' = n:
@@ -45,427 +26,156 @@ rec {
else op (elemAt list n) (fold' (n + 1));
in fold' 0;
/* `fold` is an alias of `foldr` for historic reasons */
# FIXME(Profpatsch): deprecate?
fold = foldr;
/* left fold, like `foldr`, but from the left:
`foldl op nul [x_1 x_2 ... x_n] == op (... (op (op nul x_1) x_2) ... x_n)`.
Type: foldl :: (b -> a -> b) -> b -> [a] -> b
Example:
lconcat = foldl (a: b: a + b) "z"
lconcat [ "a" "b" "c" ]
=> "zabc"
# different types
lstrange = foldl (str: int: str + toString (int + 1)) "a"
lstrange [ 1 2 3 4 ]
=> "a2345"
*/
# Left fold: `fold op nul [x_1 x_2 ... x_n] == op (... (op (op nul
# x_1) x_2) ... x_n)'.
foldl = op: nul: list:
let
len = length list;
foldl' = n:
if n == -1
then nul
else op (foldl' (n - 1)) (elemAt list n);
in foldl' (length list - 1);
/* Strict version of `foldl`.
The difference is that evaluation is forced upon access. Usually used
with small whole results (in contrast with lazily-generated lists or large
lists where only a part is consumed.)
Type: foldl' :: (b -> a -> b) -> b -> [a] -> b
*/
# Strict version of foldl.
foldl' = builtins.foldl' or foldl;
/* Map with index starting from 0
Type: imap0 :: (int -> a -> b) -> [a] -> [b]
# Map with index: `imap (i: v: "${v}-${toString i}") ["a" "b"] ==
# ["a-1" "b-2"]'. FIXME: why does this start to count at 1?
imap =
if builtins ? genList then
f: list: genList (n: f (n + 1) (elemAt list n)) (length list)
else
f: list:
let
len = length list;
imap' = n:
if n == len
then []
else [ (f (n + 1) (elemAt list n)) ] ++ imap' (n + 1);
in imap' 0;
Example:
imap0 (i: v: "${v}-${toString i}") ["a" "b"]
=> [ "a-0" "b-1" ]
*/
imap0 = f: list: genList (n: f n (elemAt list n)) (length list);
/* Map with index starting from 1
# Map and concatenate the result.
concatMap = f: list: concatLists (map f list);
Type: imap1 :: (int -> a -> b) -> [a] -> [b]
Example:
imap1 (i: v: "${v}-${toString i}") ["a" "b"]
=> [ "a-1" "b-2" ]
*/
imap1 = f: list: genList (n: f (n + 1) (elemAt list n)) (length list);
/* Map and concatenate the result.
Type: concatMap :: (a -> [b]) -> [a] -> [b]
Example:
concatMap (x: [x] ++ ["z"]) ["a" "b"]
=> [ "a" "z" "b" "z" ]
*/
concatMap = builtins.concatMap or (f: list: concatLists (map f list));
/* Flatten the argument into a single list; that is, nested lists are
spliced into the top-level lists.
Example:
flatten [1 [2 [3] 4] 5]
=> [1 2 3 4 5]
flatten 1
=> [1]
*/
# Flatten the argument into a single list; that is, nested lists are
# spliced into the top-level lists. E.g., `flatten [1 [2 [3] 4] 5]
# == [1 2 3 4 5]' and `flatten 1 == [1]'.
flatten = x:
if isList x
then concatMap (y: flatten y) x
then foldl' (x: y: x ++ (flatten y)) [] x
else [x];
/* Remove elements equal to 'e' from a list. Useful for buildInputs.
Type: remove :: a -> [a] -> [a]
# Remove elements equal to 'e' from a list. Useful for buildInputs.
remove = e: filter (x: x != e);
Example:
remove 3 [ 1 3 4 3 ]
=> [ 1 4 ]
*/
remove =
# Element to remove from the list
e: filter (x: x != e);
/* Find the sole element in the list matching the specified
predicate, returns `default` if no such element exists, or
`multiple` if there are multiple matching elements.
Type: findSingle :: (a -> bool) -> a -> a -> [a] -> a
Example:
findSingle (x: x == 3) "none" "multiple" [ 1 3 3 ]
=> "multiple"
findSingle (x: x == 3) "none" "multiple" [ 1 3 ]
=> 3
findSingle (x: x == 3) "none" "multiple" [ 1 9 ]
=> "none"
*/
findSingle =
# Predicate
pred:
# Default value to return if element was not found.
default:
# Default value to return if more than one element was found
multiple:
# Input list
list:
# Find the sole element in the list matching the specified
# predicate, returns `default' if no such element exists, or
# `multiple' if there are multiple matching elements.
findSingle = pred: default: multiple: list:
let found = filter pred list; len = length found;
in if len == 0 then default
else if len != 1 then multiple
else head found;
/* Find the first element in the list matching the specified
predicate or return `default` if no such element exists.
Type: findFirst :: (a -> bool) -> a -> [a] -> a
Example:
findFirst (x: x > 3) 7 [ 1 6 4 ]
=> 6
findFirst (x: x > 9) 7 [ 1 6 4 ]
=> 7
*/
findFirst =
# Predicate
pred:
# Default value to return
default:
# Input list
list:
# Find the first element in the list matching the specified
# predicate or returns `default' if no such element exists.
findFirst = pred: default: list:
let found = filter pred list;
in if found == [] then default else head found;
/* Return true if function `pred` returns true for at least one
element of `list`.
Type: any :: (a -> bool) -> [a] -> bool
# Return true iff function `pred' returns true for at least element
# of `list'.
any = builtins.any or (pred: fold (x: y: if pred x then true else y) false);
Example:
any isString [ 1 "a" { } ]
=> true
any isString [ 1 { } ]
=> false
*/
any = builtins.any or (pred: foldr (x: y: if pred x then true else y) false);
/* Return true if function `pred` returns true for all elements of
`list`.
# Return true iff function `pred' returns true for all elements of
# `list'.
all = builtins.all or (pred: fold (x: y: if pred x then y else false) true);
Type: all :: (a -> bool) -> [a] -> bool
Example:
all (x: x < 3) [ 1 2 ]
=> true
all (x: x < 3) [ 1 2 3 ]
=> false
*/
all = builtins.all or (pred: foldr (x: y: if pred x then y else false) true);
# Count how many times function `pred' returns true for the elements
# of `list'.
count = pred: foldl' (c: x: if pred x then c + 1 else c) 0;
/* Count how many elements of `list` match the supplied predicate
function.
Type: count :: (a -> bool) -> [a] -> int
Example:
count (x: x == 3) [ 3 2 3 4 6 ]
=> 2
*/
count =
# Predicate
pred: foldl' (c: x: if pred x then c + 1 else c) 0;
/* Return a singleton list or an empty list, depending on a boolean
value. Useful when building lists with optional elements
(e.g. `++ optional (system == "i686-linux") flashplayer').
Type: optional :: bool -> a -> [a]
Example:
optional true "foo"
=> [ "foo" ]
optional false "foo"
=> [ ]
*/
# Return a singleton list or an empty list, depending on a boolean
# value. Useful when building lists with optional elements
# (e.g. `++ optional (system == "i686-linux") flashplayer').
optional = cond: elem: if cond then [elem] else [];
/* Return a list or an empty list, depending on a boolean value.
Type: optionals :: bool -> [a] -> [a]
Example:
optionals true [ 2 3 ]
=> [ 2 3 ]
optionals false [ 2 3 ]
=> [ ]
*/
optionals =
# Condition
cond:
# List to return if condition is true
elems: if cond then elems else [];
# Return a list or an empty list, dependening on a boolean value.
optionals = cond: elems: if cond then elems else [];
/* If argument is a list, return it; else, wrap it in a singleton
list. If you're using this, you should almost certainly
reconsider if there isn't a more "well-typed" approach.
Example:
toList [ 1 2 ]
=> [ 1 2 ]
toList "hi"
=> [ "hi "]
*/
# If argument is a list, return it; else, wrap it in a singleton
# list. If you're using this, you should almost certainly
# reconsider if there isn't a more "well-typed" approach.
toList = x: if isList x then x else [x];
/* Return a list of integers from `first' up to and including `last'.
Type: range :: int -> int -> [int]
Example:
range 2 4
=> [ 2 3 4 ]
range 3 2
=> [ ]
*/
# Return a list of integers from `first' up to and including `last'.
range =
# First integer in the range
first:
# Last integer in the range
last:
if first > last then
[]
if builtins ? genList then
first: last:
if first > last
then []
else genList (n: first + n) (last - first + 1)
else
genList (n: first + n) (last - first + 1);
first: last:
if last < first
then []
else [first] ++ range (first + 1) last;
/* Splits the elements of a list in two lists, `right` and
`wrong`, depending on the evaluation of a predicate.
Type: (a -> bool) -> [a] -> { right :: [a], wrong :: [a] }
Example:
partition (x: x > 2) [ 5 1 2 3 4 ]
=> { right = [ 5 3 4 ]; wrong = [ 1 2 ]; }
*/
partition = builtins.partition or (pred:
foldr (h: t:
# Partition the elements of a list in two lists, `right' and
# `wrong', depending on the evaluation of a predicate.
partition = pred:
fold (h: t:
if pred h
then { right = [h] ++ t.right; wrong = t.wrong; }
else { right = t.right; wrong = [h] ++ t.wrong; }
) { right = []; wrong = []; });
) { right = []; wrong = []; };
/* Splits the elements of a list into many lists, using the return value of a predicate.
The predicate should return a string, which becomes a key of the attrset `groupBy` returns.
`groupBy'` allows customising the combining function and the initial value.
Example:
groupBy (x: boolToString (x > 2)) [ 5 1 2 3 4 ]
=> { true = [ 5 3 4 ]; false = [ 1 2 ]; }
groupBy (x: x.name) [ {name = "icewm"; script = "icewm &";}
{name = "xfce"; script = "xfce4-session &";}
{name = "icewm"; script = "icewmbg &";}
{name = "mate"; script = "gnome-session &";}
]
=> { icewm = [ { name = "icewm"; script = "icewm &"; }
{ name = "icewm"; script = "icewmbg &"; } ];
mate = [ { name = "mate"; script = "gnome-session &"; } ];
xfce = [ { name = "xfce"; script = "xfce4-session &"; } ];
}
groupBy' builtins.add 0 (x: boolToString (x > 2)) [ 5 1 2 3 4 ]
=> { true = 12; false = 3; }
*/
groupBy' = op: nul: pred: lst:
foldl' (r: e:
let
key = pred e;
in
r // { ${key} = op (r.${key} or nul) e; }
) {} lst;
groupBy = groupBy' (sum: e: sum ++ [e]) [];
/* Merges two lists of the same size together. If the sizes aren't the same
the merging stops at the shortest. How both lists are merged is defined
by the first argument.
Type: zipListsWith :: (a -> b -> c) -> [a] -> [b] -> [c]
Example:
zipListsWith (a: b: a + b) ["h" "l"] ["e" "o"]
=> ["he" "lo"]
*/
zipListsWith =
# Function to zip elements of both lists
f:
# First list
fst:
# Second list
snd:
genList
(n: f (elemAt fst n) (elemAt snd n)) (min (length fst) (length snd));
if builtins ? genList then
f: fst: snd: genList (n: f (elemAt fst n) (elemAt snd n)) (min (length fst) (length snd))
else
f: fst: snd:
let
len = min (length fst) (length snd);
zipListsWith' = n:
if n != len then
[ (f (elemAt fst n) (elemAt snd n)) ]
++ zipListsWith' (n + 1)
else [];
in zipListsWith' 0;
/* Merges two lists of the same size together. If the sizes aren't the same
the merging stops at the shortest.
Type: zipLists :: [a] -> [b] -> [{ fst :: a, snd :: b}]
Example:
zipLists [ 1 2 ] [ "a" "b" ]
=> [ { fst = 1; snd = "a"; } { fst = 2; snd = "b"; } ]
*/
zipLists = zipListsWith (fst: snd: { inherit fst snd; });
/* Reverse the order of the elements of a list.
Type: reverseList :: [a] -> [a]
# Reverse the order of the elements of a list.
reverseList =
if builtins ? genList then
xs: let l = length xs; in genList (n: elemAt xs (l - n - 1)) l
else
fold (e: acc: acc ++ [ e ]) [];
Example:
reverseList [ "b" "o" "j" ]
=> [ "j" "o" "b" ]
*/
reverseList = xs:
let l = length xs; in genList (n: elemAt xs (l - n - 1)) l;
/* Depth-First Search (DFS) for lists `list != []`.
`before a b == true` means that `b` depends on `a` (there's an
edge from `b` to `a`).
Example:
listDfs true hasPrefix [ "/home/user" "other" "/" "/home" ]
== { minimal = "/"; # minimal element
visited = [ "/home/user" ]; # seen elements (in reverse order)
rest = [ "/home" "other" ]; # everything else
}
listDfs true hasPrefix [ "/home/user" "other" "/" "/home" "/" ]
== { cycle = "/"; # cycle encountered at this element
loops = [ "/" ]; # and continues to these elements
visited = [ "/" "/home/user" ]; # elements leading to the cycle (in reverse order)
rest = [ "/home" "other" ]; # everything else
*/
listDfs = stopOnCycles: before: list:
let
dfs' = us: visited: rest:
let
c = filter (x: before x us) visited;
b = partition (x: before x us) rest;
in if stopOnCycles && (length c > 0)
then { cycle = us; loops = c; inherit visited rest; }
else if length b.right == 0
then # nothing is before us
{ minimal = us; inherit visited rest; }
else # grab the first one before us and continue
dfs' (head b.right)
([ us ] ++ visited)
(tail b.right ++ b.wrong);
in dfs' (head list) [] (tail list);
/* Sort a list based on a partial ordering using DFS. This
implementation is O(N^2), if your ordering is linear, use `sort`
instead.
`before a b == true` means that `b` should be after `a`
in the result.
Example:
toposort hasPrefix [ "/home/user" "other" "/" "/home" ]
== { result = [ "/" "/home" "/home/user" "other" ]; }
toposort hasPrefix [ "/home/user" "other" "/" "/home" "/" ]
== { cycle = [ "/home/user" "/" "/" ]; # path leading to a cycle
loops = [ "/" ]; } # loops back to these elements
toposort hasPrefix [ "other" "/home/user" "/home" "/" ]
== { result = [ "other" "/" "/home" "/home/user" ]; }
toposort (a: b: a < b) [ 3 2 1 ] == { result = [ 1 2 3 ]; }
*/
toposort = before: list:
let
dfsthis = listDfs true before list;
toporest = toposort before (dfsthis.visited ++ dfsthis.rest);
in
if length list < 2
then # finish
{ result = list; }
else if dfsthis ? "cycle"
then # there's a cycle, starting from the current vertex, return it
{ cycle = reverseList ([ dfsthis.cycle ] ++ dfsthis.visited);
inherit (dfsthis) loops; }
else if toporest ? "cycle"
then # there's a cycle somewhere else in the graph, return it
toporest
# Slow, but short. Can be made a bit faster with an explicit stack.
else # there are no cycles
{ result = [ dfsthis.minimal ] ++ toporest.result; };
/* Sort a list based on a comparator function which compares two
elements and returns true if the first argument is strictly below
the second argument. The returned list is sorted in an increasing
order. The implementation does a quick-sort.
Example:
sort (a: b: a < b) [ 5 3 7 ]
=> [ 3 5 7 ]
*/
# Sort a list based on a comparator function which compares two
# elements and returns true if the first argument is strictly below
# the second argument. The returned list is sorted in an increasing
# order. The implementation does a quick-sort.
sort = builtins.sort or (
strictLess: list:
let
@@ -483,97 +193,42 @@ rec {
if len < 2 then list
else (sort strictLess pivot.left) ++ [ first ] ++ (sort strictLess pivot.right));
/* Compare two lists element-by-element.
Example:
compareLists compare [] []
=> 0
compareLists compare [] [ "a" ]
=> -1
compareLists compare [ "a" ] []
=> 1
compareLists compare [ "a" "b" ] [ "a" "c" ]
=> 1
*/
compareLists = cmp: a: b:
if a == []
then if b == []
then 0
else -1
else if b == []
then 1
else let rel = cmp (head a) (head b); in
if rel == 0
then compareLists cmp (tail a) (tail b)
else rel;
/* Sort list using "Natural sorting".
Numeric portions of strings are sorted in numeric order.
Example:
naturalSort ["disk11" "disk8" "disk100" "disk9"]
=> ["disk8" "disk9" "disk11" "disk100"]
naturalSort ["10.46.133.149" "10.5.16.62" "10.54.16.25"]
=> ["10.5.16.62" "10.46.133.149" "10.54.16.25"]
naturalSort ["v0.2" "v0.15" "v0.0.9"]
=> [ "v0.0.9" "v0.2" "v0.15" ]
*/
naturalSort = lst:
let
vectorise = s: map (x: if isList x then toInt (head x) else x) (builtins.split "(0|[1-9][0-9]*)" s);
prepared = map (x: [ (vectorise x) x ]) lst; # remember vectorised version for O(n) regex splits
less = a: b: (compareLists compare (head a) (head b)) < 0;
in
map (x: elemAt x 1) (sort less prepared);
/* Return the first (at most) N elements of a list.
Type: take :: int -> [a] -> [a]
Example:
take 2 [ "a" "b" "c" "d" ]
=> [ "a" "b" ]
take 2 [ ]
=> [ ]
*/
# Return the first (at most) N elements of a list.
take =
# Number of elements to take
count: sublist 0 count;
if builtins ? genList then
count: sublist 0 count
else
count: list:
let
len = length list;
take' = n:
if n == len || n == count
then []
else
[ (elemAt list n) ] ++ take' (n + 1);
in take' 0;
/* Remove the first (at most) N elements of a list.
Type: drop :: int -> [a] -> [a]
Example:
drop 2 [ "a" "b" "c" "d" ]
=> [ "c" "d" ]
drop 2 [ ]
=> [ ]
*/
# Remove the first (at most) N elements of a list.
drop =
# Number of elements to drop
count:
# Input list
list: sublist count (length list) list;
if builtins ? genList then
count: list: sublist count (length list) list
else
count: list:
let
len = length list;
drop' = n:
if n == -1 || n < count
then []
else
drop' (n - 1) ++ [ (elemAt list n) ];
in drop' (len - 1);
/* Return a list consisting of at most `count` elements of `list`,
starting at index `start`.
Type: sublist :: int -> int -> [a] -> [a]
Example:
sublist 1 3 [ "a" "b" "c" "d" "e" ]
=> [ "b" "c" "d" ]
sublist 1 3 [ ]
=> [ ]
*/
sublist =
# Index at which to start the sublist
start:
# Number of elements to take
count:
# Input list
list:
# Return a list consisting of at most count elements of list,
# starting at index start.
sublist = start: count: list:
let len = length list; in
genList
(n: elemAt list (n + start))
@@ -581,49 +236,23 @@ rec {
else if start + count > len then len - start
else count);
/* Return the last element of a list.
This function throws an error if the list is empty.
Type: last :: [a] -> a
Example:
last [ 1 2 3 ]
=> 3
*/
# Return the last element of a list.
last = list:
assert list != []; elemAt list (length list - 1);
/* Return all elements but the last.
This function throws an error if the list is empty.
Type: init :: [a] -> [a]
Example:
init [ 1 2 3 ]
=> [ 1 2 ]
*/
# Return all elements but the last
init = list: assert list != []; take (length list - 1) list;
/* Return the image of the cross product of some lists by a function.
deepSeqList = xs: y: if any (x: deepSeq x false) xs then y else y;
Example:
crossLists (x:y: "${toString x}${toString y}") [[1 2] [3 4]]
=> [ "13" "14" "23" "24" ]
*/
crossLists = f: foldl (fs: args: concatMap (f: map f args) fs) [f];
/* Remove duplicate elements from the list. O(n^2) complexity.
Type: unique :: [a] -> [a]
Example:
unique [ 3 2 3 4 ]
=> [ 3 2 4 ]
*/
# Remove duplicate elements from the list. O(n^2) complexity.
unique = list:
if list == [] then
[]
@@ -633,28 +262,12 @@ rec {
xs = unique (drop 1 list);
in [x] ++ remove x xs;
/* Intersects list 'e' and another list. O(nm) complexity.
Example:
intersectLists [ 1 2 3 ] [ 6 3 2 ]
=> [ 3 2 ]
*/
# Intersects list 'e' and another list. O(nm) complexity.
intersectLists = e: filter (x: elem x e);
/* Subtracts list 'e' from another list. O(nm) complexity.
Example:
subtractLists [ 3 2 ] [ 1 2 3 4 5 3 ]
=> [ 1 4 5 ]
*/
# Subtracts list 'e' from another list. O(nm) complexity.
subtractLists = e: filter (x: !(elem x e));
/* Test if two lists have no common element.
It should be slightly more efficient than (intersectLists a b == [])
*/
mutuallyExclusive = a: b:
(builtins.length a) == 0 ||
(!(builtins.elem (builtins.head a) b) &&
mutuallyExclusive (builtins.tail a) b);
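/* Illustrative sketch (not part of the original file):
     mutuallyExclusive [ 1 2 ] [ 3 4 ]
     => true
     mutuallyExclusive [ 1 2 ] [ 2 3 ]
     => false
*/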
}

lib/maintainers.nix Normal file

@@ -0,0 +1,367 @@
/* List of NixOS maintainers. The format is:
handle = "Real Name <address@example.org>";
where <handle> is preferred to be your GitHub username (so it's easy
to ping a package @<handle>), and <Real Name> is your real name, not
a pseudonym. Please keep the list alphabetically sorted. */
{
a1russell = "Adam Russell <adamlr6+pub@gmail.com>";
aaronschif = "Aaron Schif <aaronschif@gmail.com>";
abaldeau = "Andreas Baldeau <andreas@baldeau.net>";
abbradar = "Nikolay Amiantov <ab@fmap.me>";
adev = "Adrien Devresse <adev@adev.name>";
aespinosa = "Allan Espinosa <allan.espinosa@outlook.com>";
aflatter = "Alexander Flatter <flatter@fastmail.fm>";
aforemny = "Alexander Foremny <alexanderforemny@googlemail.com>";
aherrmann = "Andreas Herrmann <andreash87@gmx.ch>";
ak = "Alexander Kjeldaas <ak@formalprivacy.com>";
akaWolf = "Artjom Vejsel <akawolf0@gmail.com>";
akc = "Anders Claesson <akc@akc.is>";
algorith = "Dries Van Daele <dries_van_daele@telenet.be>";
all = "Nix Committers <nix-commits@lists.science.uu.nl>";
ambrop72 = "Ambroz Bizjak <ambrop7@gmail.com>";
amiddelk = "Arie Middelkoop <amiddelk@gmail.com>";
amorsillo = "Andrew Morsillo <andrew.morsillo@gmail.com>";
AndersonTorres = "Anderson Torres <torres.anderson.85@gmail.com>";
anderspapitto = "Anders Papitto <anderspapitto@gmail.com>";
andres = "Andres Loeh <ksnixos@andres-loeh.de>";
andrewrk = "Andrew Kelley <superjoe30@gmail.com>";
aneeshusa = "Aneesh Agrawal <aneeshusa@gmail.com>";
antono = "Antono Vasiljev <self@antono.info>";
ardumont = "Antoine R. Dumont <eniotna.t@gmail.com>";
aristid = "Aristid Breitkreuz <aristidb@gmail.com>";
arobyn = "Alexei Robyn <shados@shados.net>";
asppsa = "Alastair Pharo <asppsa@gmail.com>";
astsmtl = "Alexander Tsamutali <astsmtl@yandex.ru>";
aszlig = "aszlig <aszlig@redmoonstudios.org>";
auntie = "Jonathan Glines <auntieNeo@gmail.com>";
avnik = "Alexander V. Nikolaev <avn@avnik.info>";
aycanirican = "Aycan iRiCAN <iricanaycan@gmail.com>";
badi = "Badi' Abdul-Wahid <abdulwahidc@gmail.com>";
balajisivaraman = "Balaji Sivaraman<sivaraman.balaji@gmail.com>";
Baughn = "Svein Ove Aas <sveina@gmail.com>";
bbenoist = "Baptist BENOIST <return_0@live.com>";
bcarrell = "Brandon Carrell <brandoncarrell@gmail.com>";
bcdarwin = "Ben Darwin <bcdarwin@gmail.com>";
bdimcheff = "Brandon Dimcheff <brandon@dimcheff.com>";
benley = "Benjamin Staffin <benley@gmail.com>";
bennofs = "Benno Fünfstück <benno.fuenfstueck@gmail.com>";
berdario = "Dario Bertini <berdario@gmail.com>";
bergey = "Daniel Bergey <bergey@teallabs.org>";
bjg = "Brian Gough <bjg@gnu.org>";
bjornfor = "Bjørn Forsman <bjorn.forsman@gmail.com>";
bluescreen303 = "Mathijs Kwik <mathijs@bluescreen303.nl>";
bobvanderlinden = "Bob van der Linden <bobvanderlinden@gmail.com>";
bodil = "Bodil Stokke <nix@bodil.org>";
boothead = "Ben Ford <ben@perurbis.com>";
bosu = "Boris Sukholitko <boriss@gmail.com>";
bramd = "Bram Duvigneau <bram@bramd.nl>";
bstrik = "Berno Strik <dutchman55@gmx.com>";
bzizou = "Bruno Bzeznik <Bruno@bzizou.net>";
c0dehero = "CodeHero <codehero@nerdpol.ch>";
calrama = "Moritz Maxeiner <moritz@ucworks.org>";
campadrenalin = "Philip Horger <campadrenalin@gmail.com>";
cdepillabout = "Dennis Gosnell <cdep.illabout@gmail.com>";
cfouche = "Chaddaï Fouché <chaddai.fouche@gmail.com>";
chaoflow = "Florian Friesdorf <flo@chaoflow.net>";
chattered = "Phil Scott <me@philscotted.com>";
christopherpoole = "Christopher Mark Poole <mail@christopherpoole.net>";
cleverca22 = "Michael Bishop <cleverca22@gmail.com>";
coconnor = "Corey O'Connor <coreyoconnor@gmail.com>";
codsl = "codsl <codsl@riseup.net>";
codyopel = "Cody Opel <codyopel@gmail.com>";
colemickens = "Cole Mickens <cole.mickens@gmail.com>";
copumpkin = "Dan Peebles <pumpkingod@gmail.com>";
coroa = "Jonas Hörsch <jonas@chaoflow.net>";
couchemar = "Andrey Pavlov <couchemar@yandex.ru>";
cstrahan = "Charles Strahan <charles.c.strahan@gmail.com>";
cwoac = "Oliver Matthews <oliver@codersoffortune.net>";
DamienCassou = "Damien Cassou <damien@cassou.me>";
dasuxullebt = "Christoph-Simon Senjak <christoph.senjak@googlemail.com>";
davidak = "David Kleuker <post@davidak.de>";
davidrusu = "David Rusu <davidrusu.me@gmail.com>";
dbohdan = "Danyil Bohdan <danyil.bohdan@gmail.com>";
deepfire = "Kosyrev Serge <_deepfire@feelingofgreen.ru>";
demin-dmitriy = "Dmitriy Demin <demindf@gmail.com>";
DerGuteMoritz = "Moritz Heidkamp <moritz@twoticketsplease.de>";
desiderius = "Didier J. Devroye <didier@devroye.name>";
devhell = "devhell <\"^\"@regexmail.net>";
dezgeg = "Tuomas Tynkkynen <tuomas.tynkkynen@iki.fi>";
dfoxfranke = "Daniel Fox Franke <dfoxfranke@gmail.com>";
dgonyeo = "Derek Gonyeo <derek@gonyeo.com>";
dmalikov = "Dmitry Malikov <malikov.d.y@gmail.com>";
dochang = "Desmond O. Chang <dochang@gmail.com>";
doublec = "Chris Double <chris.double@double.co.nz>";
ebzzry = "Rommel Martinez <ebzzry@gmail.com>";
ederoyd46 = "Matthew Brown <matt@ederoyd.co.uk>";
eduarrrd = "Eduard Bachmakov <e.bachmakov@gmail.com>";
edwtjo = "Edward Tjörnhammar <ed@cflags.cc>";
eelco = "Eelco Dolstra <eelco.dolstra@logicblox.com>";
ehegnes = "Eric Hegnes <eric.hegnes@gmail.com>";
ehmry = "Emery Hemingway <emery@vfemail.net>";
eikek = "Eike Kettner <eike.kettner@posteo.de>";
elasticdog = "Aaron Bull Schaefer <aaron@elasticdog.com>";
elitak = "Eric Litak <elitak@gmail.com>";
ellis = "Ellis Whitehead <nixos@ellisw.net>";
epitrochoid = "Mabry Cervin <mpcervin@uncg.edu>";
ericbmerritt = "Eric Merritt <eric@afiniate.com>";
ericsagnes = "Eric Sagnes <eric.sagnes@gmail.com>";
erikryb = "Erik Rybakken <erik.rybakken@math.ntnu.no>";
ertes = "Ertugrul Söylemez <ertesx@gmx.de>";
exi = "Reno Reckling <nixos@reckling.org>";
exlevan = "Alexey Levan <exlevan@gmail.com>";
fadenb = "Tristan Helmich <tristan.helmich+nixos@gmail.com>";
falsifian = "James Cook <james.cook@utoronto.ca>";
flosse = "Markus Kohlhase <mail@markus-kohlhase.de>";
fluffynukeit = "Daniel Austin <dan@fluffynukeit.com>";
forkk = "Andrew Okin <forkk@forkk.net>";
fornever = "Friedrich von Never <friedrich@fornever.me>";
fpletz = "Franz Pletz <fpletz@fnordicwalking.de>";
fps = "Florian Paul Schmidt <mista.tapas@gmx.net>";
fridh = "Frederik Rietdijk <fridh@fridh.nl>";
fro_ozen = "fro_ozen <fro_ozen@gmx.de>";
ftrvxmtrx = "Siarhei Zirukin <ftrvxmtrx@gmail.com>";
funfunctor = "Edward O'Callaghan <eocallaghan@alterapraxis.com>";
fuuzetsu = "Mateusz Kowalczyk <fuuzetsu@fuuzetsu.co.uk>";
fxfactorial = "Edgar Aroutiounian <edgar.factorial@gmail.com>";
gal_bolle = "Florent Becker <florent.becker@ens-lyon.org>";
garbas = "Rok Garbas <rok@garbas.si>";
garrison = "Jim Garrison <jim@garrison.cc>";
gavin = "Gavin Rogers <gavin@praxeology.co.uk>";
gebner = "Gabriel Ebner <gebner@gebner.org>";
gfxmonk = "Tim Cuthbertson <tim@gfxmonk.net>";
giogadi = "Luis G. Torres <lgtorres42@gmail.com>";
gleber = "Gleb Peregud <gleber.p@gmail.com>";
globin = "Robin Gloster <mail@glob.in>";
goibhniu = "Cillian de Róiste <cillian.deroiste@gmail.com>";
Gonzih = "Max Gonzih <gonzih@gmail.com>";
gridaphobe = "Eric Seidel <eric@seidel.io>";
guibert = "David Guibert <david.guibert@gmail.com>";
havvy = "Ryan Scheel <ryan.havvy@gmail.com>";
hbunke = "Hendrik Bunke <bunke.hendrik@gmail.com>";
henrytill = "Henry Till <henrytill@gmail.com>";
hiberno = "Christian Lask <mail@elfsechsundzwanzig.de>";
hinton = "Tom Hinton <t@larkery.com>";
hrdinka = "Christoph Hrdinka <c.nix@hrdinka.at>";
iand675 = "Ian Duncan <ian@iankduncan.com>";
ianwookim = "Ian-Woo Kim <ianwookim@gmail.com>";
iElectric = "Domen Kozar <domen@dev.si>";
igsha = "Igor Sharonov <igor.sharonov@gmail.com>";
ikervagyok = "Balázs Lengyel <ikervagyok@gmail.com>";
j-keck = "Jürgen Keck <jhyphenkeck@gmail.com>";
jagajaga = "Arseniy Seroka <ars.seroka@gmail.com>";
javaguirre = "Javier Aguirre <contacto@javaguirre.net>";
jb55 = "William Casarin <bill@casarin.me>";
jcumming = "Jack Cummings <jack@mudshark.org>";
jefdaj = "Jeffrey David Johnson <jefdaj@gmail.com>";
jfb = "James Felix Black <james@yamtime.com>";
jgeerds = "Jascha Geerds <jascha@jgeerds.name>";
jgillich = "Jakob Gillich <jakob@gillich.me>";
jirkamarsik = "Jirka Marsik <jiri.marsik89@gmail.com>";
joachifm = "Joachim Fasting <joachifm@fastmail.fm>";
joamaki = "Jussi Maki <joamaki@gmail.com>";
joelmo = "Joel Moberg <joel.moberg@gmail.com>";
joelteon = "Joel Taylor <me@joelt.io>";
jpbernardy = "Jean-Philippe Bernardy <jeanphilippe.bernardy@gmail.com>";
jwiegley = "John Wiegley <johnw@newartisans.com>";
jwilberding = "Jordan Wilberding <jwilberding@afiniate.com>";
jzellner = "Jeff Zellner <jeffz@eml.cc>";
kamilchm = "Kamil Chmielewski <kamil.chm@gmail.com>";
kampfschlaefer = "Arnold Krille <arnold@arnoldarts.de>";
kevincox = "Kevin Cox <kevincox@kevincox.ca>";
khumba = "Bryan Gardiner <bog@khumba.net>";
kkallio = "Karn Kallio <tierpluspluslists@gmail.com>";
koral = "Koral <koral@mailoo.org>";
kovirobi = "Kovacsics Robert <kovirobi@gmail.com>";
kragniz = "Louis Taylor <louis@kragniz.eu>";
ktosiek = "Tomasz Kontusz <tomasz.kontusz@gmail.com>";
lassulus = "Lassulus <lassulus@gmail.com>";
layus = "Guillaume Maudoux <layus.on@gmail.com>";
lebastr = "Alexander Lebedev <lebastr@gmail.com>";
leenaars = "Michiel Leenaars <ml.software@leenaa.rs>";
leonardoce = "Leonardo Cecchi <leonardo.cecchi@gmail.com>";
lethalman = "Luca Bruno <lucabru@src.gnome.org>";
lewo = "Antoine Eiche <lewo@abesis.fr>";
lhvwb = "Nathaniel Baxter <nathaniel.baxter@gmail.com>";
lihop = "Leroy Hopson <nixos@leroy.geek.nz>";
linquize = "Linquize <linquize@yahoo.com.hk>";
linus = "Linus Arver <linusarver@gmail.com>";
lnl7 = "Daiderd Jordan <daiderd@gmail.com>";
lovek323 = "Jason O'Conal <jason@oconal.id.au>";
lowfatcomputing = "Andreas Wagner <andreas.wagner@lowfatcomputing.org>";
lsix = "Lancelot SIX <lsix@lancelotsix.com>";
ludo = "Ludovic Courtès <ludo@gnu.org>";
luispedro = "Luis Pedro Coelho <luis@luispedro.org>";
lukasepple = "Lukas Epple <post@lukasepple.de>";
lukego = "Luke Gorrie <luke@snabb.co>";
lw = "Sergey Sofeychuk <lw@fmap.me>";
madjar = "Georges Dubus <georges.dubus@compiletoi.net>";
magnetophon = "Bart Brouns <bart@magnetophon.nl>";
mahe = "Matthias Herrmann <matthias.mh.herrmann@gmail.com>";
makefu = "Felix Richter <makefu@syntax-fehler.de>";
malyn = "Michael Alyn Miller <malyn@strangeGizmo.com>";
manveru = "Michael Fellinger <m.fellinger@gmail.com>";
marcweber = "Marc Weber <marco-oweber@gmx.de>";
markWot = "Markus Wotringer <markus@wotringer.de>";
matejc = "Matej Cotman <cotman.matej@gmail.com>";
mathnerd314 = "Mathnerd314 <mathnerd314.gph+hs@gmail.com>";
matthiasbeyer = "Matthias Beyer <mail@beyermatthias.de>";
maurer = "Matthew Maurer <matthew.r.maurer+nix@gmail.com>";
mbakke = "Marius Bakke <ymse@tuta.io>";
mbe = "Brandon Edens <brandonedens@gmail.com>";
mcmtroffaes = "Matthias C. M. Troffaes <matthias.troffaes@gmail.com>";
meditans = "Carlo Nucera <meditans@gmail.com>";
meisternu = "Matt Miemiec <meister@krutt.org>";
michaelpj = "Michael Peyton Jones <michaelpj@gmail.com>";
michelk = "Michel Kuhlmann <michel@kuhlmanns.info>";
mingchuan = "Ming Chuan <ming@culpring.com>";
mirdhyn = "Merlin Gaillard <mirdhyn@gmail.com>";
modulistic = "Pablo Costa <modulistic@gmail.com>";
mog = "Matthew O'Gorman <mog-lists@rldn.net>";
mornfall = "Petr Ročkai <me@mornfall.net>";
MP2E = "Cray Elliott <MP2E@archlinux.us>";
msackman = "Matthew Sackman <matthew@wellquite.org>";
mschristiansen = "Mikkel Christiansen <mikkel@rheosystems.com>";
msteen = "Matthijs Steen <emailmatthijs@gmail.com>";
mtreskin = "Max Treskin <zerthurd@gmail.com>";
mudri = "James Wood <lamudri@gmail.com>";
muflax = "Stefan Dorn <mail@muflax.com>";
nathan-gs = "Nathan Bijnens <nathan@nathan.gs>";
nckx = "Tobias Geerinckx-Rice <tobias.geerinckx.rice@gmail.com>";
nequissimus = "Tim Steinbach <tim@nequissimus.com>";
nfjinjing = "Jinjing Wang <nfjinjing@gmail.com>";
nico202 = "Nicolò Balzarotti <anothersms@gmail.com>";
notthemessiah = "Brian Cohen <brian.cohen.88@gmail.com>";
np = "Nicolas Pouillard <np.nix@nicolaspouillard.fr>";
nslqqq = "Nikita Mikhailov <nslqqq@gmail.com>";
obadz = "obadz <dav-nixos@odav.org>";
ocharles = "Oliver Charles <ollie@ocharles.org.uk>";
odi = "Oliver Dunkl <oliver.dunkl@gmail.com>";
offline = "Jaka Hudoklin <jakahudoklin@gmail.com>";
olcai = "Erik Timan <dev@timan.info>";
orbitz = "Malcolm Matalka <mmatalka@gmail.com>";
osener = "Ozan Sener <ozan@ozansener.com>";
oxij = "Jan Malakhovski <oxij@oxij.org>";
page = "Carles Pagès <page@cubata.homelinux.net>";
paholg = "Paho Lurie-Gregg <paho@paholg.com>";
pakhfn = "Fedor Pakhomov <pakhfn@gmail.com>";
palo = "Ingolf Wanger <palipalo9@googlemail.com>";
pashev = "Igor Pashev <pashev.igor@gmail.com>";
pesterhazy = "Paulus Esterhazy <pesterhazy@gmail.com>";
philandstuff = "Philip Potter <philip.g.potter@gmail.com>";
phile314 = "Philipp Hausmann <nix@314.ch>";
Phlogistique = "Noé Rubinstein <noe.rubinstein@gmail.com>";
phreedom = "Evgeny Egorochkin <phreedom@yandex.ru>";
phunehehe = "Hoang Xuan Phu <phunehehe@gmail.com>";
pierron = "Nicolas B. Pierron <nixos@nbp.name>";
piotr = "Piotr Pietraszkiewicz <ppietrasa@gmail.com>";
pjbarnoy = "Perry Barnoy <pjbarnoy@gmail.com>";
pjones = "Peter Jones <pjones@devalot.com>";
pkmx = "Chih-Mao Chen <pkmx.tw@gmail.com>";
plcplc = "Philip Lykke Carlsen <plcplc@gmail.com>";
pmahoney = "Patrick Mahoney <pat@polycrystal.org>";
pmiddend = "Philipp Middendorf <pmidden@secure.mailbox.org>";
prikhi = "Pavan Rikhi <pavan.rikhi@gmail.com>";
profpatsch = "Profpatsch <mail@profpatsch.de>";
psibi = "Sibi <sibi@psibi.in>";
pSub = "Pascal Wittmann <mail@pascal-wittmann.de>";
puffnfresh = "Brian McKenna <brian@brianmckenna.org>";
pxc = "Patrick Callahan <patrick.callahan@latitudeengineering.com>";
qknight = "Joachim Schiele <js@lastlog.de>";
ragge = "Ragnar Dahlen <r.dahlen@gmail.com>";
raskin = "Michael Raskin <7c6f434c@mail.ru>";
redbaron = "Maxim Ivanov <ivanov.maxim@gmail.com>";
refnil = "Martin Lavoie <broemartino@gmail.com>";
relrod = "Ricky Elrod <ricky@elrod.me>";
renzo = "Renzo Carbonara <renzocarbonara@gmail.com>";
rick68 = "Wei-Ming Yang <rick68@gmail.com>";
rickynils = "Rickard Nilsson <rickynils@gmail.com>";
rnhmjoj = "Michele Guerini Rocco <micheleguerinirocco@me.com>";
rob = "Rob Vermaas <rob.vermaas@gmail.com>";
robberer = "Longrin Wischnewski <robberer@freakmail.de>";
robbinch = "Robbin C. <robbinch33@gmail.com>";
robgssp = "Rob Glossop <robgssp@gmail.com>";
roconnor = "Russell O'Connor <roconnor@theorem.ca>";
romildo = "José Romildo Malaquias <malaquias@gmail.com>";
rszibele = "Richard Szibele <richard_szibele@hotmail.com>";
rushmorem = "Rushmore Mushambi <rushmore@webenchanter.com>";
rvl = "Rodney Lorrimar <dev+nix@rodney.id.au>";
rvlander = "Gaëtan André <rvlander@gaetanandre.eu>";
ryantm = "Ryan Mulligan <ryan@ryantm.com>";
rycee = "Robert Helgesson <robert@rycee.net>";
samuelrivas = "Samuel Rivas <samuelrivas@gmail.com>";
sander = "Sander van der Burg <s.vanderburg@tudelft.nl>";
schmitthenner = "Fabian Schmitthenner <development@schmitthenner.eu>";
schristo = "Scott Christopher <schristopher@konputa.com>";
sepi = "Raffael Mancini <raffael@mancini.lu>";
sheganinans = "Aistis Raulinaitis <sheganinans@gmail.com>";
shell = "Shell Turner <cam.turn@gmail.com>";
shlevy = "Shea Levy <shea@shealevy.com>";
simons = "Peter Simons <simons@cryp.to>";
simonvandel = "Simon Vandel Sillesen <simon.vandel@gmail.com>";
sjagoe = "Simon Jagoe <simon@simonjagoe.com>";
sjmackenzie = "Stewart Mackenzie <setori88@gmail.com>";
sjourdois = "Stéphane kwisatz Jourdois <sjourdois@gmail.com>";
skeidel = "Sven Keidel <svenkeidel@gmail.com>";
sleexyz = "Sean Lee <freshdried@gmail.com>";
smironov = "Sergey Mironov <ierton@gmail.com>";
spacefrogg = "Michael Raitza <spacefrogg-nixos@meterriblecrew.net>";
spencerjanssen = "Spencer Janssen <spencerjanssen@gmail.com>";
spinus = "Tomasz Czyż <tomasz.czyz@gmail.com>";
sprock = "Roger Mason <rmason@mun.ca>";
spwhitt = "Spencer Whitt <sw@swhitt.me>";
stephenmw = "Stephen Weinberg <stephen@q5comm.com>";
steveej = "Stefan Junker <mail@stefanjunker.de>";
szczyp = "Szczyp <qb@szczyp.com>";
sztupi = "Attila Sztupak <attila.sztupak@gmail.com>";
taeer = "Taeer Bar-Yam <taeer@necsi.edu>";
tailhook = "Paul Colomiets <paul@colomiets.name>";
taktoa = "Remy Goldschmidt <taktoa@gmail.com>";
tavyc = "Octavian Cerna <octavian.cerna@gmail.com>";
telotortium = "Robert Irelan <rirelan@gmail.com>";
thall = "Niclas Thall <niclas.thall@gmail.com>";
thammers = "Tobias Hammerschmidt <jawr@gmx.de>";
the-kenny = "Moritz Ulrich <moritz@tarn-vedra.de>";
theuni = "Christian Theune <ct@flyingcircus.io>";
thoughtpolice = "Austin Seipp <aseipp@pobox.com>";
titanous = "Jonathan Rudenberg <jonathan@titanous.com>";
tohl = "Tomas Hlavaty <tom@logand.com>";
tokudan = "Daniel Frank <git@danielfrank.net>";
tomberek = "Thomas Bereknyei <tomberek@gmail.com>";
travisbhartwell = "Travis B. Hartwell <nafai@travishartwell.net>";
trino = "Hubert Mühlhans <muehlhans.hubert@ekodia.de>";
tstrobel = "Thomas Strobel <ts468@cam.ac.uk>";
ttuegel = "Thomas Tuegel <ttuegel@gmail.com>";
tv = "Tomislav Viljetić <tv@shackspace.de>";
tvestelind = "Tomas Vestelind <tomas.vestelind@fripost.org>";
twey = "James Twey Kay <twey@twey.co.uk>";
urkud = "Yury G. Kudryashov <urkud+nix@ya.ru>";
vandenoever = "Jos van den Oever <jos@vandenoever.info>";
vanzef = "Ivan Solyankin <vanzef@gmail.com>";
vbgl = "Vincent Laporte <Vincent.Laporte@gmail.com>";
vbmithr = "Vincent Bernardoff <vb@luminar.eu.org>";
vcunat = "Vladimír Čunát <vcunat@gmail.com>";
viric = "Lluís Batlle i Rossell <viric@viric.name>";
vizanto = "Danny Wilson <danny@prime.vc>";
vlstill = "Vladimír Štill <xstill@fi.muni.cz>";
vmandela = "Venkateswara Rao Mandela <venkat.mandela@gmail.com>";
vozz = "Oliver Hunt <oliver.huntuk@gmail.com>";
wedens = "wedens <kirill.wedens@gmail.com>";
willtim = "Tim Philip Williams <tim.williams.public@gmail.com>";
winden = "Antonio Vargas Gonzalez <windenntw@gmail.com>";
wizeman = "Ricardo M. Correia <rcorreia@wizy.org>";
wjlroe = "William Roe <willroe@gmail.com>";
wkennington = "William A. Kennington III <william@wkennington.com>";
wmertens = "Wout Mertens <Wout.Mertens@gmail.com>";
womfoo = "Kranium Gikos Mendoza <kranium@gikos.net>";
wscott = "Wayne Scott <wsc9tt@gmail.com>";
wyvie = "Elijah Rum <elijahrum@gmail.com>";
yarr = "Dmitry V. <savraz@gmail.com>";
z77z = "Marco Maggesi <maggesi@math.unifi.it>";
zagy = "Christian Zagrodnick <cz@flyingcircus.io>";
zef = "Zef Hemel <zef@zef.me>";
zimbatm = "zimbatm <zimbatm@zimbatm.com>";
zohl = "Al Zohali <zohl@fmap.me>";
zoomulator = "Kim Simmons <zoomulator@gmail.com>";
}

View File

@@ -1,7 +1,8 @@
/* Some functions for manipulating meta attributes, as well as the
name attribute. */
{ lib }:
let lib = import ./default.nix;
in
rec {
@@ -16,11 +17,6 @@ rec {
drv // { meta = (drv.meta or {}) // newAttrs; };
/* Disable Hydra builds of given derivation.
*/
dontDistribute = drv: addMetaAttrs { hydraPlatforms = []; } drv;
/* Change the symbolic name of a package for presentation purposes
(i.e., so that nix-env users can tell them apart).
*/
@@ -49,7 +45,7 @@ rec {
/* Decrease the nix-env priority of the package, i.e., other
versions/variants of the package will be preferred.
*/
lowPrio = drv: addMetaAttrs { priority = 10; } drv;
lowPrio = drv: addMetaAttrs { priority = "10"; } drv;
/* Apply lowPrio to an attrset with derivations
@@ -60,30 +56,11 @@ rec {
/* Increase the nix-env priority of the package, i.e., this
version/variant of the package will be preferred.
*/
hiPrio = drv: addMetaAttrs { priority = -10; } drv;
hiPrio = drv: addMetaAttrs { priority = "-10"; } drv;
/* Apply hiPrio to an attrset with derivations
*/
hiPrioSet = set: mapDerivationAttrset hiPrio set;
/* Check to see if a platform is matched by the given `meta.platforms`
element.
A `meta.platform` pattern is either
1. (legacy) a system string.
2. (modern) a pattern for the platform `parsed` field.
We can inject these into a pattern for the whole of a structured platform,
and then match that.
*/
platformMatch = platform: elem: let
pattern =
if builtins.isString elem
then { system = elem; }
else { parsed = elem; };
in lib.matchAttrs pattern platform;
}
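A rough illustration of the platformMatch helper shown above, assuming the newer definition is wired into lib as lib.meta and that <nixpkgs> is on NIX_PATH; the platform attrset here is a minimal stand-in, not a real stdenv platform:

  let lib = import <nixpkgs/lib>; in
  # a plain system string is wrapped as { system = ...; } and matched structurally
  lib.meta.platformMatch { system = "x86_64-linux"; } "x86_64-linux"
  # => true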

View File

@@ -1,2 +1,2 @@
# Expose the minimum required version for evaluating Nixpkgs
"1.11"
"1.10"

View File

@@ -1,12 +1,9 @@
{ lib }:
with lib.lists;
with lib.strings;
with lib.trivial;
with lib.attrsets;
with lib.options;
with lib.debug;
with lib.types;
with import ./lists.nix;
with import ./trivial.nix;
with import ./attrsets.nix;
with import ./options.nix;
with import ./debug.nix;
with import ./types.nix;
rec {
@@ -22,8 +19,7 @@ rec {
, prefix ? []
, # This should only be used for special arguments that need to be evaluated
# when resolving module structure (like in imports). For everything else,
# there's _module.args. If specialArgs.modulesPath is defined it will be
# used as the base path for disabledModules.
# there's _module.args.
specialArgs ? {}
, # This would be remove in the future, Prefer _module.args option instead.
args ? {}
@@ -59,9 +55,12 @@ rec {
};
};
closed = closeModules (modules ++ [ internalModule ]) ({ inherit config options lib; } // specialArgs);
closed = closeModules (modules ++ [ internalModule ]) ({ inherit config options; lib = import ./.; } // specialArgs);
options = mergeModules prefix (reverseList (filterModules (specialArgs.modulesPath or "") closed));
# Note: the list of modules is reversed to maintain backward
# compatibility with the old module system. Not sure if this is
# the most sensible policy.
options = mergeModules prefix (reverseList closed);
# Traverse options and extract the option values into the final
# config set. At the same time, check whether all option
@@ -87,20 +86,10 @@ rec {
result = { inherit options config; };
in result;
# Filter disabled modules. Modules can be disabled allowing
# their implementation to be replaced.
filterModules = modulesPath: modules:
let
moduleKey = m: if isString m then toString modulesPath + "/" + m else toString m;
disabledKeys = map moduleKey (concatMap (m: m.disabledModules) modules);
in
filter (m: !(elem m.key disabledKeys)) modules;
/* Close a set of modules under the imports relation. */
closeModules = modules: args:
let
toClosureList = file: parentKey: imap1 (n: x:
toClosureList = file: parentKey: imap (n: x:
if isAttrs x || isFunction x then
let key = "${parentKey}:anon-${toString n}"; in
unifyModuleSyntax file key (unpackSubmodule (applyIfFunction key) x args)
@@ -116,29 +105,23 @@ rec {
/* Massage a module into canonical form, that is, a set consisting
of options, config and imports attributes. */
unifyModuleSyntax = file: key: m:
let metaSet = if m ? meta
then { meta = m.meta; }
else {};
in
if m ? config || m ? options then
let badAttrs = removeAttrs m ["_file" "key" "disabledModules" "imports" "options" "config" "meta"]; in
let badAttrs = removeAttrs m ["imports" "options" "config" "key" "_file"]; in
if badAttrs != {} then
throw "Module `${key}' has an unsupported attribute `${head (attrNames badAttrs)}'. This is caused by assignments to the top-level attributes `config' or `options'."
else
{ file = m._file or file;
key = toString m.key or key;
disabledModules = m.disabledModules or [];
imports = m.imports or [];
options = m.options or {};
config = mkMerge [ (m.config or {}) metaSet ];
config = m.config or {};
}
else
{ file = m._file or file;
key = toString m.key or key;
disabledModules = m.disabledModules or [];
imports = m.require or [] ++ m.imports or [];
options = {};
config = mkMerge [ (removeAttrs m ["_file" "key" "disabledModules" "require" "imports"]) metaSet ];
config = removeAttrs m ["key" "_file" "require" "imports"];
};
applyIfFunction = key: f: args@{ config, options, lib, ... }: if isFunction f then
@@ -155,11 +138,11 @@ rec {
# a module will resolve strictly the attributes used as argument but
# not their values. The values are forwarding the result of the
# evaluation of the option.
requiredArgs = builtins.attrNames (lib.functionArgs f);
requiredArgs = builtins.attrNames (builtins.functionArgs f);
context = name: ''while evaluating the module argument `${name}' in "${key}":'';
extraArgs = builtins.listToAttrs (map (name: {
inherit name;
value = builtins.addErrorContext (context name)
value = addErrorContext (context name)
(args.${name} or config._module.args.${name});
}) requiredArgs);
@@ -192,53 +175,29 @@ rec {
(concatMap (m: map (config: { inherit (m) file; inherit config; }) (pushDownProperties m.config)) modules);
mergeModules' = prefix: options: configs:
let
/* byName is like foldAttrs, but will look for attributes to merge in the
specified attribute name.
byName "foo" (module: value: ["module.hidden=${module.hidden},value=${value}"])
[
{
hidden="baz";
foo={qux="bar"; gla="flop";};
}
{
hidden="fli";
foo={qux="gne"; gli="flip";};
}
]
===>
{
gla = [ "module.hidden=baz,value=flop" ];
gli = [ "module.hidden=fli,value=flip" ];
qux = [ "module.hidden=baz,value=bar" "module.hidden=fli,value=gne" ];
}
*/
byName = attr: f: modules: foldl' (acc: module:
foldl' (inner: name:
inner // { ${name} = (acc.${name} or []) ++ (f module module.${attr}.${name}); }
) acc (attrNames module.${attr})
) {} modules;
# an attrset 'name' => list of submodules that declare name.
declsByName = byName "options"
(module: option: [{ inherit (module) file; options = option; }])
options;
# an attrset 'name' => list of submodules that define name.
defnsByName = byName "config" (module: value:
map (config: { inherit (module) file; inherit config; }) (pushDownProperties value)
) configs;
# extract the definitions for each loc
defnsByName' = byName "config"
(module: value: [{ inherit (module) file; inherit value; }])
configs;
in
(flip mapAttrs declsByName (name: decls:
listToAttrs (map (name: {
# We're descending into attribute name.
inherit name;
value =
let
loc = prefix ++ [name];
defns = defnsByName.${name} or [];
defns' = defnsByName'.${name} or [];
# Get all submodules that declare name.
decls = concatMap (m:
if m.options ? ${name}
then [ { inherit (m) file; options = m.options.${name}; } ]
else []
) options;
# Get all submodules that define name.
defns = concatMap (m:
if m.config ? ${name}
then map (config: { inherit (m) file; inherit config; })
(pushDownProperties m.config.${name})
else []
) configs;
nrOptions = count (m: isOption m.options) decls;
# Extract the definitions for this loc
defns' = map (m: { inherit (m) file; value = m.config.${name}; })
(filter (m: m.config ? ${name}) configs);
in
if nrOptions == length decls then
let opt = fixupOptionType loc (mergeOptionDecls loc decls);
@@ -250,8 +209,8 @@ rec {
in
throw "The option `${showOption loc}' in `${firstOption.file}' is a prefix of options in `${firstNonOption.file}'."
else
mergeModules' loc decls defns
))
mergeModules' loc decls defns;
}) (concatMap (m: attrNames m.options) options))
// { _definedNames = map (m: { inherit (m) file; names = attrNames m.config; }) configs; };
/* Merge multiple option declarations into a single declaration. In
@@ -267,20 +226,12 @@ rec {
correspond to the definition of 'loc' in 'opt.file'. */
mergeOptionDecls = loc: opts:
foldl' (res: opt:
let t = res.type;
t' = opt.options.type;
mergedType = t.typeMerge t'.functor;
typesMergeable = mergedType != null;
typeSet = if (bothHave "type") && typesMergeable
then { type = mergedType; }
else {};
bothHave = k: opt.options ? ${k} && res ? ${k};
in
if bothHave "default" ||
bothHave "example" ||
bothHave "description" ||
bothHave "apply" ||
(bothHave "type" && (! typesMergeable))
if opt.options ? default && res ? default ||
opt.options ? example && res ? example ||
opt.options ? description && res ? description ||
opt.options ? apply && res ? apply ||
# Accept to merge options which have identical types.
opt.options ? type && res ? type && opt.options.type.name != res.type.name
then
throw "The option `${showOption loc}' in `${opt.file}' is already declared in ${showFiles res.declarations}."
else
@@ -302,7 +253,7 @@ rec {
in opt.options // res //
{ declarations = res.declarations ++ [opt.file];
options = submodules;
} // typeSet
}
) { inherit loc; declarations = []; options = []; } opts;
/* Merge all the definitions of an option to produce the final
@@ -333,8 +284,7 @@ rec {
res.mergedValue;
in opt //
{ value = builtins.addErrorContext "while evaluating the option `${showOption loc}':" value;
inherit (res.defsFinal') highestPrio;
{ value = addErrorContext "while evaluating the option `${showOption loc}':" value;
definitions = map (def: def.value) res.defsFinal;
files = map (def: def.file) res.defsFinal;
inherit (res) isDefined;
@@ -342,7 +292,7 @@ rec {
# Merge definitions of a value of a given type.
mergeDefinitions = loc: type: defs: rec {
defsFinal' =
defsFinal =
let
# Process mkMerge and mkIf properties.
defs' = concatMap (m:
@@ -350,25 +300,20 @@ rec {
) defs;
# Process mkOverride properties.
defs'' = filterOverrides' defs';
defs'' = filterOverrides defs';
# Sort mkOrder properties.
defs''' =
# Avoid sorting if we don't have to.
if any (def: def.value._type or "" == "order") defs''.values
then sortProperties defs''.values
else defs''.values;
in {
values = defs''';
inherit (defs'') highestPrio;
};
defsFinal = defsFinal'.values;
if any (def: def.value._type or "" == "order") defs''
then sortProperties defs''
else defs'';
in defs''';
# Type-check the remaining definitions, and merge them.
mergedValue = foldl' (res: def:
if type.check def.value then res
else throw "The option value `${showOption loc}' in `${def.file}' is not of type `${type.description}'.")
else throw "The option value `${showOption loc}' in `${def.file}' is not a ${type.name}.")
(type.merge loc defsFinal) defsFinal;
isDefined = defsFinal != [];
@@ -417,13 +362,10 @@ rec {
if def._type or "" == "merge" then
concatMap dischargeProperties def.contents
else if def._type or "" == "if" then
if isBool def.condition then
if def.condition then
dischargeProperties def.content
else
[ ]
if def.condition then
dischargeProperties def.content
else
throw "mkIf called with a non-Boolean condition"
[ ]
else
[ def ];
@@ -446,21 +388,16 @@ rec {
Note that "z" has the default priority 100.
*/
filterOverrides = defs: (filterOverrides' defs).values;
filterOverrides' = defs:
filterOverrides = defs:
let
defaultPrio = 100;
getPrio = def: if def.value._type or "" == "override" then def.value.priority else defaultPrio;
highestPrio = foldl' (prio: def: min (getPrio def) prio) 9999 defs;
strip = def: if def.value._type or "" == "override" then def // { value = def.value.content; } else def;
in {
values = concatMap (def: if getPrio def == highestPrio then [(strip def)] else []) defs;
inherit highestPrio;
};
in concatMap (def: if getPrio def == highestPrio then [(strip def)] else []) defs;
/* Sort a list of properties. The sort priority of a property is
1000 by default, but can be overridden by wrapping the property
1000 by default, but can be overriden by wrapping the property
using mkOrder. */
sortProperties = defs:
let
@@ -480,14 +417,12 @@ rec {
options = opt.options or
(throw "Option `${showOption loc'}' has type optionSet but has no option attribute, in ${showFiles opt.declarations}.");
f = tp:
let optionSetIn = type: (tp.name == type) && (tp.functor.wrapped.name == "optionSet");
in
if tp.name == "option set" || tp.name == "submodule" then
throw "The option ${showOption loc} uses submodules without a wrapping type, in ${showFiles opt.declarations}."
else if optionSetIn "attrsOf" then types.attrsOf (types.submodule options)
else if optionSetIn "loaOf" then types.loaOf (types.submodule options)
else if optionSetIn "listOf" then types.listOf (types.submodule options)
else if optionSetIn "nullOr" then types.nullOr (types.submodule options)
else if tp.name == "attribute set of option sets" then types.attrsOf (types.submodule options)
else if tp.name == "list or attribute set of option sets" then types.loaOf (types.submodule options)
else if tp.name == "list of option sets" then types.listOf (types.submodule options)
else if tp.name == "null or option set" then types.nullOr (types.submodule options)
else tp;
in
if opt.type.getSubModules or null == null
@@ -517,7 +452,7 @@ rec {
inherit priority content;
};
mkOptionDefault = mkOverride 1500; # priority of option defaults
mkOptionDefault = mkOverride 1001; # priority of option defaults
mkDefault = mkOverride 1000; # used in config sections of non-user modules to set a default
mkForce = mkOverride 50;
mkVMOverride = mkOverride 10; # used by nixos-rebuild build-vm
@@ -556,7 +491,9 @@ rec {
#
mkAliasDefinitions = mkAliasAndWrapDefinitions id;
mkAliasAndWrapDefinitions = wrap: option:
mkIf (isOption option && option.isDefined) (wrap (mkMerge option.definitions));
mkMerge
(optional (isOption option && option.isDefined)
(wrap (mkMerge option.definitions)));
/* Compatibility. */
@@ -566,25 +503,19 @@ rec {
/* Return a module that causes a warning to be shown if the
specified option is defined. For example,
mkRemovedOptionModule [ "boot" "loader" "grub" "bootDevice" ] "<replacement instructions>"
mkRemovedOptionModule [ "boot" "loader" "grub" "bootDevice" ]
causes a warning if the user defines boot.loader.grub.bootDevice.
replacementInstructions is a string that provides instructions on
how to achieve the same functionality without the removed option,
or alternatively a reasoning why the functionality is not needed.
replacementInstructions SHOULD be provided!
*/
mkRemovedOptionModule = optionName: replacementInstructions:
mkRemovedOptionModule = optionName:
{ options, ... }:
{ options = setAttrByPath optionName (mkOption {
visible = false;
});
config.warnings =
let opt = getAttrFromPath optionName options; in
optional opt.isDefined ''
The option definition `${showOption optionName}' in ${showFiles opt.files} no longer has any effect; please remove it.
${replacementInstructions}'';
optional opt.isDefined
"The option definition `${showOption optionName}' in ${showFiles opt.files} no longer has any effect; please remove it.";
};
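A typical call site for the two-argument mkRemovedOptionModule shown above might look like the following sketch; the option path and replacement text are invented for illustration and are not part of this diff:

  # hypothetical NixOS module fragment
  { lib, ... }:
  {
    imports = [
      (lib.mkRemovedOptionModule [ "services" "foo" "bar" ]
        "Use services.foo.baz instead.")
    ];
  }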
/* Return a module that causes a warning to be shown if the
@@ -604,84 +535,6 @@ rec {
use = builtins.trace "Obsolete option `${showOption from}' is used. It was renamed to `${showOption to}'.";
};
/* Return a module that causes a warning to be shown if any of the "from"
option is defined; the defined values can be used in the "mergeFn" to set
the "to" value.
This function can be used to merge multiple options into one that has a
different type.
"mergeFn" takes the module "config" as a parameter and must return a value
of "to" option type.
mkMergedOptionModule
[ [ "a" "b" "c" ]
[ "d" "e" "f" ] ]
[ "x" "y" "z" ]
(config:
let value = p: getAttrFromPath p config;
in
if (value [ "a" "b" "c" ]) == true then "foo"
else if (value [ "d" "e" "f" ]) == true then "bar"
else "baz")
- options.a.b.c is a removed boolean option
- options.d.e.f is a removed boolean option
- options.x.y.z is a new str option that combines a.b.c and d.e.f
functionality
This show a warning if any a.b.c or d.e.f is set, and set the value of
x.y.z to the result of the merge function
*/
mkMergedOptionModule = from: to: mergeFn:
{ config, options, ... }:
{
options = foldl recursiveUpdate {} (map (path: setAttrByPath path (mkOption {
visible = false;
# To use the value in mergeFn without triggering errors
default = "_mkMergedOptionModule";
})) from);
config = {
warnings = filter (x: x != "") (map (f:
let val = getAttrFromPath f config;
opt = getAttrFromPath f options;
in
optionalString
(val != "_mkMergedOptionModule")
"The option `${showOption f}' defined in ${showFiles opt.files} has been changed to `${showOption to}' that has a different type. Please read `${showOption to}' documentation and update your configuration accordingly."
) from);
} // setAttrByPath to (mkMerge
(optional
(any (f: (getAttrFromPath f config) != "_mkMergedOptionModule") from)
(mergeFn config)));
};
/* Single "from" version of mkMergedOptionModule.
Return a module that causes a warning to be shown if the "from" option is
defined; the defined value can be used in the "mergeFn" to set the "to"
value.
This function can be used to change an option into another that has a
different type.
"mergeFn" takes the module "config" as a parameter and must return a value of
"to" option type.
mkChangedOptionModule [ "a" "b" "c" ] [ "x" "y" "z" ]
(config:
let value = getAttrFromPath [ "a" "b" "c" ] config;
in
if value > 100 then "high"
else "normal")
- options.a.b.c is a removed int option
- options.x.y.z is a new str option that supersedes a.b.c
This show a warning if a.b.c is set, and set the value of x.y.z to the
result of the change function
*/
mkChangedOptionModule = from: to: changeFn:
mkMergedOptionModule [ from ] to changeFn;
/* Like mkRenamedOptionModule, but doesn't show a warning. */
mkAliasOptionModule = from: to: doRename {
inherit from to;
@@ -691,25 +544,23 @@ rec {
};
doRename = { from, to, visible, warn, use }:
{ config, options, ... }:
let
fromOpt = getAttrFromPath from options;
toOf = attrByPath to
(abort "Renaming error: option `${showOption to}' does not exist.");
(abort "Renaming error: option `${showOption to}' does not exists.");
in
{
options = setAttrByPath from (mkOption {
inherit visible;
description = "Alias of <option>${showOption to}</option>.";
apply = x: use (toOf config);
});
config = mkMerge [
{
warnings = optional (warn && fromOpt.isDefined)
"The option `${showOption from}' defined in ${showFiles fromOpt.files} has been renamed to `${showOption to}'.";
}
(mkAliasAndWrapDefinitions (setAttrByPath to) fromOpt)
];
};
{ config, options, ... }:
{ options = setAttrByPath from (mkOption {
description = "Alias of <option>${showOption to}</option>.";
apply = x: use (toOf config);
});
config = {
/*
warnings =
let opt = getAttrFromPath from options; in
optional (warn && opt.isDefined)
"The option `${showOption from}' defined in ${showFiles opt.files} has been renamed to `${showOption to}'.";
*/
} // setAttrByPath to (mkAliasDefinitions (getAttrFromPath from options));
};
}
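To make the override-priority machinery above concrete, here is a minimal evalModules sketch; the option name and values are invented, and it assumes <nixpkgs> is available on NIX_PATH:

  let
    lib = import <nixpkgs/lib>;
    result = lib.evalModules {
      modules = [
        { options.greeting = lib.mkOption { type = lib.types.str; default = "hi"; }; }
        { config.greeting = lib.mkDefault "hello"; }  # mkOverride 1000
        { config.greeting = lib.mkForce "HELLO"; }    # mkOverride 50; lowest number wins
      ];
    };
  in result.config.greeting
  # => "HELLO"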

View File

@@ -1,78 +1,40 @@
# Nixpkgs/NixOS option handling.
{ lib }:
with lib.trivial;
with lib.lists;
with lib.attrsets;
with lib.strings;
let lib = import ./default.nix; in
with import ./trivial.nix;
with import ./lists.nix;
with import ./attrsets.nix;
with import ./strings.nix;
rec {
/* Returns true when the given argument is an option
Type: isOption :: a -> bool
Example:
isOption 1 // => false
isOption (mkOption {}) // => true
*/
isOption = lib.isType "option";
/* Creates an Option attribute set. mkOption accepts an attribute set with the following keys:
All keys default to `null` when not given.
Example:
mkOption { } // => { _type = "option"; }
mkOption { defaultText = "foo"; } // => { _type = "option"; defaultText = "foo"; }
*/
mkOption =
{
# Default value used when no definition is given in the configuration.
default ? null,
# Textual representation of the default, for the manual.
defaultText ? null,
# Example value used in the manual.
example ? null,
# String describing the option.
description ? null,
# Related packages used in the manual (see `genRelatedPackages` in ../nixos/doc/manual/default.nix).
relatedPackages ? null,
# Option type, providing type-checking and value merging.
type ? null,
# Function that converts the option value to something else.
apply ? null,
# Whether the option is for NixOS developers only.
internal ? null,
# Whether the option shows up in the manual.
visible ? null,
# Whether the option can be set only once
readOnly ? null,
# Obsolete, used by types.optionSet.
options ? null
{ default ? null # Default value used when no definition is given in the configuration.
, defaultText ? null # Textual representation of the default, for in the manual.
, example ? null # Example value used in the manual.
, description ? null # String describing the option.
, type ? null # Option type, providing type-checking and value merging.
, apply ? null # Function that converts the option value to something else.
, internal ? null # Whether the option is for NixOS developers only.
, visible ? null # Whether the option shows up in the manual.
, readOnly ? null # Whether the option can be set only once
, options ? null # Obsolete, used by types.optionSet.
} @ attrs:
attrs // { _type = "option"; };
/* Creates an Option attribute set for a boolean value option i.e an
option to be toggled on or off:
Example:
mkEnableOption "foo"
=> { _type = "option"; default = false; description = "Whether to enable foo."; example = true; type = { ... }; }
*/
mkEnableOption =
# Name for the created option
name: mkOption {
mkEnableOption = name: mkOption {
default = false;
example = true;
description = "Whether to enable ${name}.";
type = lib.types.bool;
};
/* This option accepts anything, but it does not produce any result.
This is useful for sharing a module across different module sets
without having to implement similar features as long as the
values of the options are not accessed. */
# This option accept anything, but it does not produce any result. This
# is useful for sharing a module across different module sets without
# having to implement similar features as long as the value of the options
# are not expected.
mkSinkUndeclaredOptions = attrs: mkOption ({
internal = true;
visible = false;
@@ -112,26 +74,10 @@ rec {
else
val) (head defs).value defs;
/* Extracts values of all "value" keys of the given list.
Type: getValues :: [ { value :: a } ] -> [a]
Example:
getValues [ { value = 1; } { value = 2; } ] // => [ 1 2 ]
getValues [ ] // => [ ]
*/
getValues = map (x: x.value);
/* Extracts values of all "file" keys of the given list
Type: getFiles :: [ { file :: a } ] -> [a]
Example:
getFiles [ { file = "file1"; } { file = "file2"; } ] // => [ "file1" "file2" ]
getFiles [ ] // => [ ]
*/
getFiles = map (x: x.file);
# Generate documentation template from the list of option declaration like
# the set generated with filterOptionSets.
optionAttrSetToDocList = optionAttrSetToDocList' [];
@@ -140,19 +86,17 @@ rec {
concatMap (opt:
let
docOption = rec {
loc = opt.loc;
name = showOption opt.loc;
description = opt.description or (throw "Option `${name}' has no description.");
declarations = filter (x: x != unknownModule) opt.declarations;
internal = opt.internal or false;
visible = opt.visible or true;
readOnly = opt.readOnly or false;
type = opt.type.description or null;
type = opt.type.name or null;
}
// optionalAttrs (opt ? example) { example = scrubOptionValue opt.example; }
// optionalAttrs (opt ? default) { default = scrubOptionValue opt.default; }
// optionalAttrs (opt ? defaultText) { default = opt.defaultText; }
// optionalAttrs (opt ? relatedPackages && opt.relatedPackages != null) { inherit (opt) relatedPackages; };
// (if opt ? example then { example = scrubOptionValue opt.example; } else {})
// (if opt ? default then { default = scrubOptionValue opt.default; } else {})
// (if opt ? defaultText then { default = opt.defaultText; } else {});
subOptions =
let ss = opt.type.getSubOptions opt.loc;
@@ -162,13 +106,10 @@ rec {
/* This function recursively removes all derivation attributes from
`x` except for the `name` attribute.
This is to make the generation of `options.xml` much more
efficient: the XML representation of derivations is very large
(on the order of megabytes) and is not actually used by the
manual generator.
*/
`x' except for the `name' attribute. This is to make the
generation of `options.xml' much more efficient: the XML
representation of derivations is very large (on the order of
megabytes) and is not actually used by the manual generator. */
scrubOptionValue = x:
if isDerivation x then
{ type = "derivation"; drvPath = x.name; outPath = x.name; name = x.name; }
@@ -177,29 +118,15 @@ rec {
else x;
/* For use in the `example` option attribute. It causes the given
text to be included verbatim in documentation. This is necessary
for example values that are not simple values, e.g., functions.
*/
/* For use in the example option attribute. It causes the given
text to be included verbatim in documentation. This is necessary
for example values that are not simple values, e.g.,
functions. */
literalExample = text: { _type = "literalExample"; inherit text; };
# Helper functions.
/* Convert an option, described as a list of the option parts in to a
safe, human readable version.
Example:
(showOption ["foo" "bar" "baz"]) == "foo.bar.baz"
(showOption ["foo" "bar.baz" "tux"]) == "foo.\"bar.baz\".tux"
*/
showOption = parts: let
escapeOptionPart = part:
let
escaped = lib.strings.escapeNixString part;
in if escaped == "\"${part}\""
then part
else escaped;
in (concatStringsSep ".") (map escapeOptionPart parts);
/* Helper functions. */
showOption = concatStringsSep ".";
showFiles = files: concatStringsSep " and " (map (f: "`${f}'") files);
unknownModule = "<unknown-file>";
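As a small usage sketch for mkOption and mkEnableOption as declared above (the service name and defaults are made up):

  # hypothetical module fragment, not part of this diff
  { lib, ... }:
  {
    options.services.myDaemon = {
      enable = lib.mkEnableOption "myDaemon";   # bool option, defaults to false
      port = lib.mkOption {
        type = lib.types.int;
        default = 8080;
        description = "Port for myDaemon to listen on.";
      };
    };
  }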

24
lib/platforms.nix Normal file
View File

@@ -0,0 +1,24 @@
let lists = import ./lists.nix; in
rec {
all = linux ++ darwin ++ cygwin ++ freebsd ++ openbsd ++ netbsd ++ illumos;
allBut = platforms: lists.filter (x: !(builtins.elem x platforms)) all;
none = [];
arm = ["armv5tel-linux" "armv6l-linux" "armv7l-linux" ];
i686 = ["i686-linux" "i686-freebsd" "i686-netbsd" "i686-cygwin"];
mips = [ "mips64el-linux" ];
x86_64 = ["x86_64-linux" "x86_64-darwin" "x86_64-freebsd" "x86_64-openbsd" "x86_64-netbsd" "x86_64-cygwin"];
cygwin = ["i686-cygwin" "x86_64-cygwin"];
darwin = ["x86_64-darwin"];
freebsd = ["i686-freebsd" "x86_64-freebsd"];
gnu = linux; /* ++ hurd ++ kfreebsd ++ ... */
illumos = ["x86_64-solaris"];
linux = ["i686-linux" "x86_64-linux" "armv5tel-linux" "armv6l-linux" "armv7l-linux" "mips64el-linux"];
netbsd = ["i686-netbsd" "x86_64-netbsd"];
openbsd = ["i686-openbsd" "x86_64-openbsd"];
unix = linux ++ darwin ++ freebsd ++ openbsd ++ netbsd ++ illumos;
mesaPlatforms = ["i686-linux" "x86_64-linux" "x86_64-darwin" "armv5tel-linux" "armv6l-linux" "armv7l-linux"];
}
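A sketch of how these platform groups are typically consumed from a package's meta attributes (the package itself is a placeholder):

  # hypothetical derivation fragment
  { stdenv, lib }:
  stdenv.mkDerivation {
    name = "example-0.1";
    src = ./.;
    meta = {
      description = "Example package";
      platforms = lib.platforms.unix;   # or: lib.platforms.allBut lib.platforms.cygwin
    };
  }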

47
lib/sandbox.nix Normal file
View File

@@ -0,0 +1,47 @@
with import ./strings.nix;
/* Helpers for creating lisp S-exprs for the Apple sandbox
lib.sandbox.allowFileRead [ "/usr/bin/file" ];
# => "(allow file-read* (literal \"/usr/bin/file\"))";
lib.sandbox.allowFileRead {
literal = [ "/usr/bin/file" ];
subpath = [ "/usr/lib/system" ];
}
# => "(allow file-read* (literal \"/usr/bin/file\") (subpath \"/usr/lib/system\"))"
*/
let
sexp = tokens: "(" + builtins.concatStringsSep " " tokens + ")";
generateFileList = files:
if builtins.isList files
then concatMapStringsSep " " (x: sexp [ "literal" ''"${x}"'' ]) files
else if builtins.isString files
then generateFileList [ files ]
else concatStringsSep " " (
(map (x: sexp [ "literal" ''"${x}"'' ]) (files.literal or [])) ++
(map (x: sexp [ "subpath" ''"${x}"'' ]) (files.subpath or []))
);
applyToFiles = f: act: files: f "${act} ${generateFileList files}";
genActions = actionName: let
action = feature: sexp [ actionName feature ];
self = {
"${actionName}" = action;
"${actionName}File" = applyToFiles action "file*";
"${actionName}FileRead" = applyToFiles action "file-read*";
"${actionName}FileReadMetadata" = applyToFiles action "file-read-metadata";
"${actionName}DirectoryList" = self."${actionName}FileReadMetadata";
"${actionName}FileWrite" = applyToFiles action "file-write*";
"${actionName}FileWriteMetadata" = applyToFiles action "file-write-metadata";
};
in self;
in
genActions "allow" // genActions "deny" // {
importProfile = derivation: ''
(import "${derivation}")
'';
}
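Building on the generated allow/deny helpers above, a rough usage sketch (the path is an example only):

  let sandbox = import ./sandbox.nix; in
  sandbox.allowDirectoryList [ "/private/tmp" ]
  # => "(allow file-read-metadata (literal \"/private/tmp\"))"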

View File

@@ -1,60 +1,24 @@
# Functions for copying sources to the Nix store.
{ lib }:
let lib = import ./default.nix; in
rec {
# Returns the type of a path: regular (for file), symlink, or directory
pathType = p: with builtins; getAttr (baseNameOf p) (readDir (dirOf p));
# Returns true if the path exists and is a directory, false otherwise
pathIsDirectory = p: if builtins.pathExists p then (pathType p) == "directory" else false;
# Bring in a path as a source, filtering out all Subversion and CVS
# directories, as well as backup files (*~).
cleanSourceFilter = name: type: let baseName = baseNameOf (toString name); in ! (
# Filter out Subversion and CVS directories.
(type == "directory" && (baseName == ".git" || baseName == ".svn" || baseName == "CVS" || baseName == ".hg")) ||
# Filter out editor backup / swap files.
lib.hasSuffix "~" baseName ||
builtins.match "^\\.sw[a-z]$" baseName != null ||
builtins.match "^\\..*\\.sw[a-z]$" baseName != null ||
cleanSource =
let filter = name: type: let baseName = baseNameOf (toString name); in ! (
# Filter out Subversion and CVS directories.
(type == "directory" && (baseName == ".git" || baseName == ".svn" || baseName == "CVS" || baseName == ".hg")) ||
# Filter out backup files.
lib.hasSuffix "~" baseName ||
# Filter out generates files.
lib.hasSuffix ".o" baseName ||
lib.hasSuffix ".so" baseName
);
in src: builtins.filterSource filter src;
# Filter out generates files.
lib.hasSuffix ".o" baseName ||
lib.hasSuffix ".so" baseName ||
# Filter out nix-build result symlinks
(type == "symlink" && lib.hasPrefix "result" baseName)
);
cleanSource = src: cleanSourceWith { filter = cleanSourceFilter; inherit src; };
# Like `builtins.filterSource`, except it will compose with itself,
# allowing you to chain multiple calls together without any
# intermediate copies being put in the nix store.
#
# lib.cleanSourceWith f (lib.cleanSourceWith g ./.) # Succeeds!
# builtins.filterSource f (builtins.filterSource g ./.) # Fails!
cleanSourceWith = { filter, src }:
let
isFiltered = src ? _isLibCleanSourceWith;
origSrc = if isFiltered then src.origSrc else src;
filter' = if isFiltered then name: type: filter name type && src.filter name type else filter;
in {
inherit origSrc;
filter = filter';
outPath = builtins.filterSource filter' origSrc;
_isLibCleanSourceWith = true;
};
# Filter sources by a list of regular expressions.
#
# E.g. `src = sourceByRegex ./my-subproject [".*\.py$" "^database.sql$"]`
sourceByRegex = src: regexes: cleanSourceWith {
filter = (path: type:
let relPath = lib.removePrefix (toString src + "/") (toString path);
in lib.any (re: builtins.match re relPath != null) regexes);
inherit src;
};
# Get all files ending with the specified suffices from the given
# directory or its descendants. E.g. `sourceFilesBySuffices ./dir
@@ -63,38 +27,6 @@ rec {
let filter = name: type:
let base = baseNameOf (toString name);
in type == "directory" || lib.any (ext: lib.hasSuffix ext base) exts;
in cleanSourceWith { inherit filter; src = path; };
in builtins.filterSource filter path;
# Get the commit id of a git repo
# Example: commitIdFromGitRepo <nixpkgs/.git>
commitIdFromGitRepo =
let readCommitFromFile = path: file:
with builtins;
let fileName = toString path + "/" + file;
packedRefsName = toString path + "/packed-refs";
in if lib.pathExists fileName
then
let fileContent = lib.fileContents fileName;
# Sometimes git stores the commitId directly in the file but
# sometimes it stores something like: «ref: refs/heads/branch-name»
matchRef = match "^ref: (.*)$" fileContent;
in if isNull matchRef
then fileContent
else readCommitFromFile path (lib.head matchRef)
# Sometimes, the file isn't there at all and has been packed away in the
# packed-refs file, so we have to grep through it:
else if lib.pathExists packedRefsName
then
let fileContent = readFile packedRefsName;
matchRef = match (".*\n([^\n ]*) " + file + "\n.*") fileContent;
in if isNull matchRef
then throw ("Could not find " + file + " in " + packedRefsName)
else lib.head matchRef
else throw ("Not a .git directory: " + path);
in lib.flip readCommitFromFile "HEAD";
pathHasContext = builtins.hasContext or (lib.hasPrefix builtins.storeDir);
canCleanSource = src: src ? _isLibCleanSourceWith || !(pathHasContext (toString src));
}
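The composition property of cleanSourceWith described above can be sketched as follows; the extra filter is invented for illustration, and <nixpkgs> is assumed to be on NIX_PATH:

  let lib = import <nixpkgs/lib>; in
  lib.sources.cleanSourceWith {
    src = lib.sources.cleanSource ./.;   # already filtered once, no extra store copy
    filter = name: type:
      ! lib.hasSuffix ".md" (baseNameOf (toString name));
  }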

View File

@@ -1,4 +1,3 @@
{ lib }:
/*
Usage:
@@ -16,7 +15,7 @@ Usage:
Attention:
let
pkgs = (import <nixpkgs>) {};
pkgs = (import /etc/nixos/nixpkgs/pkgs/top-level/all-packages.nix) {};
in let
inherit (pkgs.stringsWithDeps) fullDepEntry packEntry noDepEntry textClosureMap;
inherit (pkgs.lib) id;
@@ -41,9 +40,9 @@ Usage:
[1] maybe this behaviour should be removed to keep things simple (?)
*/
with lib.lists;
with lib.attrsets;
with lib.strings;
with import ./lists.nix;
with import ./attrsets.nix;
with import ./strings.nix;
rec {

View File

@@ -1,6 +1,6 @@
/* String manipulation functions. */
{ lib }:
let
let lib = import ./default.nix;
inherit (builtins) length;
@@ -10,310 +10,100 @@ rec {
inherit (builtins) stringLength substring head tail isString replaceStrings;
/* Concatenate a list of strings.
Type: concatStrings :: [string] -> string
# Concatenate a list of strings.
concatStrings =
if builtins ? concatStringsSep then
builtins.concatStringsSep ""
else
lib.foldl' (x: y: x + y) "";
Example:
concatStrings ["foo" "bar"]
=> "foobar"
*/
concatStrings = builtins.concatStringsSep "";
/* Map a function over a list and concatenate the resulting strings.
Type: concatMapStrings :: (a -> string) -> [a] -> string
Example:
concatMapStrings (x: "a" + x) ["foo" "bar"]
=> "afooabar"
*/
# Map a function over a list and concatenate the resulting strings.
concatMapStrings = f: list: concatStrings (map f list);
concatImapStrings = f: list: concatStrings (lib.imap f list);
/* Like `concatMapStrings` except that the f functions also gets the
position as a parameter.
Type: concatImapStrings :: (int -> a -> string) -> [a] -> string
Example:
concatImapStrings (pos: x: "${toString pos}-${x}") ["foo" "bar"]
=> "1-foo2-bar"
*/
concatImapStrings = f: list: concatStrings (lib.imap1 f list);
/* Place an element between each element of a list
Type: intersperse :: a -> [a] -> [a]
Example:
intersperse "/" ["usr" "local" "bin"]
=> ["usr" "/" "local" "/" "bin"].
*/
intersperse =
# Separator to add between elements
separator:
# Input list
list:
# Place an element between each element of a list, e.g.,
# `intersperse "," ["a" "b" "c"]' returns ["a" "," "b" "," "c"].
intersperse = separator: list:
if list == [] || length list == 1
then list
else tail (lib.concatMap (x: [separator x]) list);
/* Concatenate a list of strings with a separator between each element
Type: concatStringsSep :: string -> [string] -> string
Example:
concatStringsSep "/" ["usr" "local" "bin"]
=> "usr/local/bin"
*/
# Concatenate a list of strings with a separator between each element, e.g.
# concatStringsSep " " ["foo" "bar" "xyzzy"] == "foo bar xyzzy"
concatStringsSep = builtins.concatStringsSep or (separator: list:
concatStrings (intersperse separator list));
/* Maps a function over a list of strings and then concatenates the
result with the specified separator interspersed between
elements.
Type: concatMapStringsSep :: string -> (string -> string) -> [string] -> string
Example:
concatMapStringsSep "-" (x: toUpper x) ["foo" "bar" "baz"]
=> "FOO-BAR-BAZ"
*/
concatMapStringsSep =
# Separator to add between elements
sep:
# Function to map over the list
f:
# List of input strings
list: concatStringsSep sep (map f list);
/* Same as `concatMapStringsSep`, but the mapping function
additionally receives the position of its argument.
Type: concatMapStringsSep :: string -> (int -> string -> string) -> [string] -> string
Example:
concatImapStringsSep "-" (pos: x: toString (x / pos)) [ 6 6 6 ]
=> "6-3-2"
*/
concatImapStringsSep =
# Separator to add between elements
sep:
# Function that receives elements and their positions
f:
# List of input strings
list: concatStringsSep sep (lib.imap1 f list);
/* Construct a Unix-style, colon-separated search path consisting of
the given `subDir` appended to each of the given paths.
Type: makeSearchPath :: string -> [string] -> string
Example:
makeSearchPath "bin" ["/root" "/usr" "/usr/local"]
=> "/root/bin:/usr/bin:/usr/local/bin"
makeSearchPath "bin" [""]
=> "/bin"
*/
makeSearchPath =
# Directory name to append
subDir:
# List of base paths
paths:
concatStringsSep ":" (map (path: path + "/" + subDir) (builtins.filter (x: x != null) paths));
/* Construct a Unix-style search path by appending the given
`subDir` to the specified `output` of each of the packages. If no
output by the given name is found, fallback to `.out` and then to
the default.
Type: string -> string -> [package] -> string
Example:
makeSearchPathOutput "dev" "bin" [ pkgs.openssl pkgs.zlib ]
=> "/nix/store/9rz8gxhzf8sw4kf2j2f1grr49w8zx5vj-openssl-1.0.1r-dev/bin:/nix/store/wwh7mhwh269sfjkm6k5665b5kgp7jrk2-zlib-1.2.8/bin"
*/
makeSearchPathOutput =
# Package output to use
output:
# Directory name to append
subDir:
# List of packages
pkgs: makeSearchPath subDir (map (lib.getOutput output) pkgs);
/* Construct a library search path (such as RPATH) containing the
libraries for a set of packages
Example:
makeLibraryPath [ "/usr" "/usr/local" ]
=> "/usr/lib:/usr/local/lib"
pkgs = import <nixpkgs> { }
makeLibraryPath [ pkgs.openssl pkgs.zlib ]
=> "/nix/store/9rz8gxhzf8sw4kf2j2f1grr49w8zx5vj-openssl-1.0.1r/lib:/nix/store/wwh7mhwh269sfjkm6k5665b5kgp7jrk2-zlib-1.2.8/lib"
*/
makeLibraryPath = makeSearchPathOutput "lib" "lib";
/* Construct a binary search path (such as $PATH) containing the
binaries for a set of packages.
Example:
makeBinPath ["/root" "/usr" "/usr/local"]
=> "/root/bin:/usr/bin:/usr/local/bin"
*/
makeBinPath = makeSearchPathOutput "bin" "bin";
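One place these search-path helpers commonly show up is in wrapper scripts; a hedged sketch, assuming the makeWrapper hook provides wrapProgram and using placeholder packages:

  # hypothetical postInstall hook fragment
  postInstall = ''
    wrapProgram $out/bin/example \
      --prefix PATH : ${lib.makeBinPath [ pkgs.coreutils pkgs.gnugrep ]}
  '';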
concatMapStringsSep = sep: f: list: concatStringsSep sep (map f list);
concatImapStringsSep = sep: f: list: concatStringsSep sep (lib.imap f list);
/* Construct a perl search path (such as $PERL5LIB)
# Construct a Unix-style search path consisting of each `subDir"
# directory of the given list of packages. For example,
# `makeSearchPath "bin" ["x" "y" "z"]' returns "x/bin:y/bin:z/bin".
makeSearchPath = subDir: packages:
concatStringsSep ":" (map (path: path + "/" + subDir) packages);
Example:
pkgs = import <nixpkgs> { }
makePerlPath [ pkgs.perlPackages.libnet ]
=> "/nix/store/n0m1fk9c960d8wlrs62sncnadygqqc6y-perl-Net-SMTP-1.25/lib/perl5/site_perl"
*/
# FIXME(zimbatm): this should be moved in perl-specific code
makePerlPath = makeSearchPathOutput "lib" "lib/perl5/site_perl";
/* Construct a perl search path recursively including all dependencies (such as $PERL5LIB)
# Construct a library search path (such as RPATH) containing the
# libraries for a set of packages, e.g. "${pkg1}/lib:${pkg2}/lib:...".
makeLibraryPath = makeSearchPath "lib";
Example:
pkgs = import <nixpkgs> { }
makeFullPerlPath [ pkgs.perlPackages.CGI ]
=> "/nix/store/fddivfrdc1xql02h9q500fpnqy12c74n-perl-CGI-4.38/lib/perl5/site_perl:/nix/store/8hsvdalmsxqkjg0c5ifigpf31vc4vsy2-perl-HTML-Parser-3.72/lib/perl5/site_perl:/nix/store/zhc7wh0xl8hz3y3f71nhlw1559iyvzld-perl-HTML-Tagset-3.20/lib/perl5/site_perl"
*/
makeFullPerlPath = deps: makePerlPath (lib.misc.closePropagation deps);
# Construct a binary search path (such as $PATH) containing the
# binaries for a set of packages, e.g. "${pkg1}/bin:${pkg2}/bin:...".
makeBinPath = makeSearchPath "bin";
/* Depending on the boolean `cond', return either the given string
or the empty string. Useful to concatenate against a bigger string.
Type: optionalString :: bool -> string -> string
# Idem for Perl search paths.
makePerlPath = makeSearchPath "lib/perl5/site_perl";
Example:
optionalString true "some-string"
=> "some-string"
optionalString false "some-string"
=> ""
*/
optionalString =
# Condition
cond:
# String to return if condition is true
string: if cond then string else "";
/* Determine whether a string has given prefix.
# Dependening on the boolean `cond', return either the given string
# or the empty string.
optionalString = cond: string: if cond then string else "";
Type: hasPrefix :: string -> string -> bool
Example:
hasPrefix "foo" "foobar"
=> true
hasPrefix "foo" "barfoo"
=> false
*/
hasPrefix =
# Prefix to check for
pref:
# Input string
str: substring 0 (stringLength pref) str == pref;
/* Determine whether a string has given suffix.
Type: hasSuffix :: string -> string -> bool
Example:
hasSuffix "foo" "foobar"
=> false
hasSuffix "foo" "barfoo"
=> true
*/
hasSuffix =
# Suffix to check for
suffix:
# Input string
content:
# Determine whether a string has given prefix/suffix.
hasPrefix = pref: str:
substring 0 (stringLength pref) str == pref;
hasSuffix = suff: str:
let
lenContent = stringLength content;
lenSuffix = stringLength suffix;
in lenContent >= lenSuffix &&
substring (lenContent - lenSuffix) lenContent content == suffix;
lenStr = stringLength str;
lenSuff = stringLength suff;
in lenStr >= lenSuff &&
substring (lenStr - lenSuff) lenStr str == suff;
/* Convert a string to a list of characters (i.e. singleton strings).
This allows you to, e.g., map a function over each character. However,
note that this will likely be horribly inefficient; Nix is not a
general purpose programming language. Complex string manipulations
should, if appropriate, be done in a derivation.
Also note that Nix treats strings as a list of bytes and thus doesn't
handle unicode.
Type: stringtoCharacters :: string -> [string]
Example:
stringToCharacters ""
=> [ ]
stringToCharacters "abc"
=> [ "a" "b" "c" ]
stringToCharacters "💩"
=> [ "�" "�" "�" "�" ]
*/
# Convert a string to a list of characters (i.e. singleton strings).
# For instance, "abc" becomes ["a" "b" "c"]. This allows you to,
# e.g., map a function over each character. However, note that this
# will likely be horribly inefficient; Nix is not a general purpose
# programming language. Complex string manipulations should, if
# appropriate, be done in a derivation.
stringToCharacters = s:
map (p: substring p 1 s) (lib.range 0 (stringLength s - 1));
/* Manipulate a string character by character and replace them by
strings before concatenating the results.
Type: stringAsChars :: (string -> string) -> string -> string
Example:
stringAsChars (x: if x == "a" then "i" else x) "nax"
=> "nix"
*/
stringAsChars =
# Function to map over each individual character
f:
# Input string
s: concatStrings (
# Manipulate a string charactter by character and replace them by
# strings before concatenating the results.
stringAsChars = f: s:
concatStrings (
map f (stringToCharacters s)
);
/* Escape occurrence of the elements of `list` in `string` by
prefixing it with a backslash.
Type: escape :: [string] -> string -> string
Example:
escape ["(" ")"] "(foo)"
=> "\\(foo\\)"
*/
# Escape occurrence of the elements of list in string by
# prefixing it with a backslash. For example, escape ["(" ")"]
# "(foo)" returns the string \(foo\).
escape = list: replaceChars list (map (c: "\\${c}") list);
/* Quote string to be used safely within the Bourne shell.
Type: escapeShellArg :: string -> string
# Escape all characters that have special meaning in the Bourne shell.
escapeShellArg = lib.escape (stringToCharacters "\\ ';$`()|<>\t*[]");
Example:
escapeShellArg "esc'ape\nme"
=> "'esc'\\''ape\nme'"
*/
escapeShellArg = arg: "'${replaceStrings ["'"] ["'\\''"] (toString arg)}'";
/* Quote all arguments to be safely passed to the Bourne shell.
Type: escapeShellArgs :: [string] -> string
Example:
escapeShellArgs ["one" "two three" "four'five"]
=> "'one' 'two three' 'four'\\''five'"
*/
escapeShellArgs = concatMapStringsSep " " escapeShellArg;
/* Turn a string into a Nix expression representing that string
Type: string -> string
Example:
escapeNixString "hello\${}\n"
=> "\"hello\\\${}\\n\""
*/
escapeNixString = s: escape ["$"] (builtins.toJSON s);
# Obsolete - use replaceStrings instead.
replaceChars = builtins.replaceStrings or (
@@ -329,56 +119,21 @@ rec {
in
stringAsChars subst s);
# Case conversion utilities.
lowerChars = stringToCharacters "abcdefghijklmnopqrstuvwxyz";
upperChars = stringToCharacters "ABCDEFGHIJKLMNOPQRSTUVWXYZ";
/* Converts an ASCII string to lower-case.
Type: toLower :: string -> string
Example:
toLower "HOME"
=> "home"
*/
toLower = replaceChars upperChars lowerChars;
/* Converts an ASCII string to upper-case.
Type: toUpper :: string -> string
Example:
toUpper "home"
=> "HOME"
*/
toUpper = replaceChars lowerChars upperChars;
/* Appends string context from another string. This is an implementation
detail of Nix.
Strings in Nix carry an invisible `context` which is a list of strings
representing store paths. If the string is later used in a derivation
attribute, the derivation will properly populate the inputDrvs and
inputSrcs.
Example:
pkgs = import <nixpkgs> { };
addContextFrom pkgs.coreutils "bar"
=> "bar"
*/
# Appends string context from another string.
addContextFrom = a: b: substring 0 0 a + b;
/* Cut a string with a separator and produces a list of strings which
were separated by this separator.
NOTE: this function is not performant and should never be used.
Example:
splitString "." "foo.bar.baz"
=> [ "foo" "bar" "baz" ]
splitString "/" "/usr/local/bin"
=> [ "" "usr" "local" "bin" ]
*/
# Cut a string with a separator and produces a list of strings which
# were separated by this separator; e.g., `splitString "."
# "foo.bar.baz"' returns ["foo" "bar" "baz"].
splitString = _sep: _s:
let
sep = addContextFrom _s _sep;
@@ -391,7 +146,7 @@ rec {
recurse = index: startAt:
let cutUntil = i: [(substring startAt (i - startAt) s)]; in
if index <= lastSearch then
if index < lastSearch then
if startWithSep index then
let restartAt = index + sepLen; in
cutUntil index ++ recurse restartAt restartAt
@@ -402,102 +157,49 @@ rec {
in
recurse 0 0;
/* Return a string without the specified prefix, if the prefix matches.
Type: string -> string -> string
Example:
removePrefix "foo." "foo.bar.baz"
=> "bar.baz"
removePrefix "xxx" "foo.bar.baz"
=> "foo.bar.baz"
*/
removePrefix =
# Prefix to remove if it matches
prefix:
# Input string
str:
# return the suffix of the second argument if the first argument match its
# prefix. e.g.,
# `removePrefix "foo." "foo.bar.baz"' returns "bar.baz".
removePrefix = pre: s:
let
preLen = stringLength prefix;
sLen = stringLength str;
preLen = stringLength pre;
sLen = stringLength s;
in
if hasPrefix prefix str then
substring preLen (sLen - preLen) str
if hasPrefix pre s then
substring preLen (sLen - preLen) s
else
str;
s;
/* Return a string without the specified suffix, if the suffix matches.
Type: string -> string -> string
Example:
removeSuffix "front" "homefront"
=> "home"
removeSuffix "xxx" "homefront"
=> "homefront"
*/
removeSuffix =
# Suffix to remove if it matches
suffix:
# Input string
str:
removeSuffix = suf: s:
let
sufLen = stringLength suffix;
sLen = stringLength str;
sufLen = stringLength suf;
sLen = stringLength s;
in
if sufLen <= sLen && suffix == substring (sLen - sufLen) sufLen str then
substring 0 (sLen - sufLen) str
if sufLen <= sLen && suf == substring (sLen - sufLen) sufLen s then
substring 0 (sLen - sufLen) s
else
str;
s;
/* Return true if string v1 denotes a version older than v2.
Example:
versionOlder "1.1" "1.2"
=> true
versionOlder "1.1" "1.1"
=> false
*/
# Return true iff string v1 denotes a version older than v2.
versionOlder = v1: v2: builtins.compareVersions v2 v1 == 1;
/* Return true if string v1 denotes a version equal to or newer than v2.
Example:
versionAtLeast "1.1" "1.0"
=> true
versionAtLeast "1.1" "1.1"
=> true
versionAtLeast "1.1" "1.2"
=> false
*/
# Return true iff string v1 denotes a version equal to or newer than v2.
versionAtLeast = v1: v2: !versionOlder v1 v2;
/* This function takes an argument that's either a derivation or a
derivation's "name" attribute and extracts the version part from that
argument.
Example:
getVersion "youtube-dl-2016.01.01"
=> "2016.01.01"
getVersion pkgs.youtube-dl
=> "2016.01.01"
*/
getVersion = x:
let
parse = drv: (builtins.parseDrvName drv).version;
in if isString x
then parse x
else x.version or (parse x.name);
# This function takes an argument that's either a derivation or a
# derivation's "name" attribute and extracts the version part from that
# argument. For example:
#
# lib.getVersion "youtube-dl-2016.01.01" ==> "2016.01.01"
# lib.getVersion pkgs.youtube-dl ==> "2016.01.01"
getVersion = x: (builtins.parseDrvName (x.name or x)).version;
/* Extract a name with its version from a URL. The separator argument marks
where the extension is expected to begin.
Example:
nameFromURL "https://nixos.org/releases/nix/nix-1.7/nix-1.7-x86_64-linux.tar.bz2" "-"
=> "nix"
nameFromURL "https://nixos.org/releases/nix/nix-1.7/nix-1.7-x86_64-linux.tar.bz2" "_"
=> "nix-1.7-x86"
*/
# Extract name with version from URL. Ask for separator which is
# supposed to start extension.
nameFromURL = url: sep:
let
components = splitString "/" url;
@@ -505,62 +207,14 @@ rec {
name = builtins.head (splitString sep filename);
in assert name != filename; name;
/* Create an --{enable,disable}-<feat> string that can be passed to
standard GNU Autoconf scripts.
Example:
enableFeature true "shared"
=> "--enable-shared"
enableFeature false "shared"
=> "--disable-shared"
*/
# Create an --{enable,disable}-<feat> string that can be passed to
# standard GNU Autoconf scripts.
enableFeature = enable: feat: "--${if enable then "enable" else "disable"}-${feat}";
/* Create an --{enable-<feat>=<value>,disable-<feat>} string that can be passed to
standard GNU Autoconf scripts.
Example:
enableFeature true "shared" "foo"
=> "--enable-shared=foo"
enableFeature false "shared" (throw "ignored")
=> "--disable-shared"
*/
enableFeatureAs = enable: feat: value: enableFeature enable feat + optionalString enable "=${value}";
/* Create an --{with,without}-<feat> string that can be passed to
standard GNU Autoconf scripts.
Example:
withFeature true "shared"
=> "--with-shared"
withFeature false "shared"
=> "--without-shared"
*/
withFeature = with_: feat: "--${if with_ then "with" else "without"}-${feat}";
/* Create an --{with-<feat>=<value>,without-<feat>} string that can be passed to
standard GNU Autoconf scripts.
Example:
with_Feature true "shared" "foo"
=> "--with-shared=foo"
with_Feature false "shared" (throw "ignored")
=> "--without-shared"
*/
withFeatureAs = with_: feat: value: withFeature with_ feat + optionalString with_ "=${value}";
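Taken together these four helpers keep configureFlags declarative; a usage sketch (the feature names are illustrative only):

let
  lib = import <nixpkgs/lib>;
  enableGui = false;
in [
  (lib.enableFeature enableGui "gui")        # "--disable-gui"
  (lib.enableFeatureAs true "lto" "thin")    # "--enable-lto=thin"
  (lib.withFeature true "x")                 # "--with-x"
  (lib.withFeatureAs true "ssl" "openssl")   # "--with-ssl=openssl"
]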
/* Create a fixed width string with additional prefix to match
required width.
This function will fail if the input string is longer than the
requested length.
Type: fixedWidthString :: int -> string -> string
Example:
fixedWidthString 5 "0" (toString 15)
=> "00015"
*/
# Create a fixed width string with additional prefix to match
# required width.
fixedWidthString = width: filler: str:
let
strw = lib.stringLength str;
@@ -569,91 +223,35 @@ rec {
assert strw <= width;
if strw == width then str else filler + fixedWidthString reqWidth filler str;
/* Format a number adding leading zeroes up to fixed width.
Example:
fixedWidthNumber 5 15
=> "00015"
*/
# Format a number adding leading zeroes up to fixed width.
fixedWidthNumber = width: n: fixedWidthString width "0" (toString n);
/* Check whether a value can be coerced to a string */
isCoercibleToString = x:
builtins.elem (builtins.typeOf x) [ "path" "string" "null" "int" "float" "bool" ] ||
(builtins.isList x && lib.all isCoercibleToString x) ||
x ? outPath ||
x ? __toString;
/* Check whether a value is a store path.
Example:
isStorePath "/nix/store/d945ibfx9x185xf04b890y4f9g3cbb63-python-2.7.11/bin/python"
=> false
isStorePath "/nix/store/d945ibfx9x185xf04b890y4f9g3cbb63-python-2.7.11/"
=> true
isStorePath pkgs.python
=> true
isStorePath [] || isStorePath 42 || isStorePath {}
=> false
*/
# Check whether a value is a store path.
isStorePath = x: builtins.substring 0 1 (toString x) == "/" && dirOf (builtins.toPath x) == builtins.storeDir;
isStorePath = x:
isCoercibleToString x
&& builtins.substring 0 1 (toString x) == "/"
&& dirOf (builtins.toPath x) == builtins.storeDir;
/* Parse a string as an int.
Type: string -> int
Example:
toInt "1337"
=> 1337
toInt "-4"
=> -4
toInt "3.14"
=> error: floating point JSON numbers are not supported
*/
# Obviously, it is a bit hacky to use fromJSON this way.
# Convert string to int
# Obviously, it is a bit hacky to use fromJSON that way.
toInt = str:
let may_be_int = builtins.fromJSON str; in
if builtins.isInt may_be_int
then may_be_int
else throw "Could not convert ${str} to int.";
/* Read a list of paths from `file`, relative to the `rootPath`.
Lines beginning with `#` are treated as comments and ignored.
Whitespace is significant.
NOTE: This function is not performant and should be avoided.
Example:
readPathsFromFile /prefix
./pkgs/development/libraries/qt-5/5.4/qtbase/series
=> [ "/prefix/dlopen-resolv.patch" "/prefix/tzdir.patch"
"/prefix/dlopen-libXcursor.patch" "/prefix/dlopen-openssl.patch"
"/prefix/dlopen-dbus.patch" "/prefix/xdg-config-dirs.patch"
"/prefix/nix-profiles-library-paths.patch"
"/prefix/compose-search-path.patch" ]
*/
# Read a list of paths from `file', relative to the `rootPath'. Lines
# beginning with `#' are treated as comments and ignored. Whitespace
# is significant.
readPathsFromFile = rootPath: file:
let
root = toString rootPath;
lines = lib.splitString "\n" (builtins.readFile file);
removeComments = lib.filter (line: line != "" && !(lib.hasPrefix "#" line));
lines =
builtins.map (lib.removeSuffix "\n")
(lib.splitString "\n" (builtins.readFile file));
removeComments = lib.filter (line: !(lib.hasPrefix "#" line));
relativePaths = removeComments lines;
absolutePaths = builtins.map (path: builtins.toPath (root + "/" + path)) relativePaths;
in
absolutePaths;
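The intended use is a plain-text "series" file next to the Nix expression, one patch file name per line; a sketch (the file name and contents are illustrative):

let lib = import <nixpkgs/lib>; in
{
  # Lines of ./series starting with `#` are ignored; every other line
  # becomes an absolute path under ./.
  patches = lib.readPathsFromFile ./. ./series;
}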
/* Read the contents of a file removing the trailing \n
Type: fileContents :: path -> string
Example:
$ echo "1.0" > ./version
fileContents ./version
=> "1.0"
*/
fileContents = file: removeSuffix "\n" (builtins.readFile file);
}

126
lib/systems.nix Normal file
View File

@@ -0,0 +1,126 @@
# Define the list of systems with their properties. Only systems tested for
# Nixpkgs are listed below
with import ./lists.nix;
with import ./types.nix;
with import ./attrsets.nix;
let
lib = import ./default.nix;
setTypes = type:
mapAttrs (name: value:
setType type ({inherit name;} // value)
);
in
rec {
isSignificantByte = isType "significant-byte";
significantBytes = setTypes "significant-byte" {
bigEndian = {};
littleEndian = {};
};
isCpuType = x: isType "cpu-type" x
&& elem x.bits [8 16 32 64 128]
&& (8 < x.bits -> isSignificantByte x.significantByte);
cpuTypes = with significantBytes;
setTypes "cpu-type" {
arm = { bits = 32; significantByte = littleEndian; };
armv5tel = { bits = 32; significantByte = littleEndian; };
armv7l = { bits = 32; significantByte = littleEndian; };
i686 = { bits = 32; significantByte = littleEndian; };
powerpc = { bits = 32; significantByte = bigEndian; };
x86_64 = { bits = 64; significantByte = littleEndian; };
};
isExecFormat = isType "exec-format";
execFormats = setTypes "exec-format" {
aout = {}; # a.out
elf = {};
macho = {};
pe = {};
unknow = {};
};
isKernel = isType "kernel";
kernels = with execFormats;
setTypes "kernel" {
cygwin = { execFormat = pe; };
darwin = { execFormat = macho; };
freebsd = { execFormat = elf; };
linux = { execFormat = elf; };
netbsd = { execFormat = elf; };
none = { execFormat = unknow; };
openbsd = { execFormat = elf; };
win32 = { execFormat = pe; };
};
isArchitecture = isType "architecture";
architectures = setTypes "architecture" {
apple = {};
pc = {};
unknow = {};
};
isSystem = x: isType "system" x
&& isCpuType x.cpu
&& isArchitecture x.arch
&& isKernel x.kernel;
mkSystem = {
cpu ? cpuTypes.i686,
arch ? architectures.pc,
kernel ? kernels.linux,
name ? "${cpu.name}-${arch.name}-${kernel.name}"
}: setType "system" {
inherit name cpu arch kernel;
};
is64Bit = matchAttrs { cpu = { bits = 64; }; };
isDarwin = matchAttrs { kernel = kernels.darwin; };
isi686 = matchAttrs { cpu = cpuTypes.i686; };
isLinux = matchAttrs { kernel = kernels.linux; };
# This should reverse the job done by config.guess from the gcc compiler.
mkSystemFromString = s: let
l = lib.splitString "-" s;
getCpu = name:
attrByPath [name] (throw "Unknow cpuType `${name}'.")
cpuTypes;
getArch = name:
attrByPath [name] (throw "Unknow architecture `${name}'.")
architectures;
getKernel = name:
attrByPath [name] (throw "Unknow kernel `${name}'.")
kernels;
system =
if builtins.length l == 2 then
mkSystem rec {
name = s;
cpu = getCpu (head l);
arch =
if isDarwin system
then architectures.apple
else architectures.pc;
kernel = getKernel (head (tail l));
}
else
mkSystem {
name = s;
cpu = getCpu (head l);
arch = getArch (head (tail l));
kernel = getKernel (head (tail (tail l)));
};
in assert isSystem system; system;
}
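A sketch of calling this parser directly, assuming the file is evaluated in place as lib/systems.nix next to the helpers it imports:

let systems = import ./systems.nix; in
rec {
  linux64 = systems.mkSystemFromString "x86_64-linux";
  bits    = linux64.cpu.bits;                                  # => 64
  darwin  = systems.isDarwin
              (systems.mkSystemFromString "x86_64-darwin");    # => true
}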

View File

@@ -1,60 +0,0 @@
{ lib }:
let inherit (lib.attrsets) mapAttrs; in
rec {
doubles = import ./doubles.nix { inherit lib; };
forMeta = import ./for-meta.nix { inherit lib; };
parse = import ./parse.nix { inherit lib; };
inspect = import ./inspect.nix { inherit lib; };
platforms = import ./platforms.nix { inherit lib; };
examples = import ./examples.nix { inherit lib; };
# Elaborate a `localSystem` or `crossSystem` so that it contains everything
# necessary.
#
# `parsed` is inferred from args, both because there are two options with one
# clearly preferred, and to prevent cycles. A simpler fixed point where the RHS
# always just used `final.*` would fail on both counts.
elaborate = args: let
final = {
# Prefer to parse `config` as it is strictly more informative.
parsed = parse.mkSystemFromString (if args ? config then args.config else args.system);
# Either of these can be losslessly-extracted from `parsed` iff parsing succeeds.
system = parse.doubleFromSystem final.parsed;
config = parse.tripleFromSystem final.parsed;
# Just a guess, based on `system`
platform = platforms.selectBySystem final.system;
# Derived meta-data
libc =
/**/ if final.isDarwin then "libSystem"
else if final.isMinGW then "msvcrt"
else if final.isMusl then "musl"
else if final.isUClibc then "uclibc"
else if final.isAndroid then "bionic"
else if final.isLinux /* default */ then "glibc"
# TODO(@Ericson2314) think more about other operating systems
else "native/impure";
extensions = {
sharedLibrary =
/**/ if final.isDarwin then ".dylib"
else if final.isWindows then ".dll"
else ".so";
executable =
/**/ if final.isWindows then ".exe"
else "";
};
# Misc boolean options
useAndroidPrebuilt = false;
useiOSPrebuilt = false;
} // mapAttrs (n: v: v final.parsed) inspect.predicates
// args;
in assert final.useAndroidPrebuilt -> final.isAndroid;
assert lib.foldl
(pass: { assertion, message }:
if assertion final
then pass
else throw message)
true
(final.parsed.abi.assertions or []);
final;
}
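A sketch of what elaborate fills in when handed only a double; the values shown are what the definitions above produce for x86_64-linux:

let
  lib  = import <nixpkgs/lib>;
  host = lib.systems.elaborate { system = "x86_64-linux"; };
in {
  config = host.config;                     # "x86_64-unknown-linux-gnu"
  libc   = host.libc;                       # "glibc"
  soExt  = host.extensions.sharedLibrary;   # ".so"
  checks = [ host.isLinux host.is64bit ];   # [ true true ]
}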

View File

@@ -1,48 +0,0 @@
{ lib }:
let
inherit (lib) lists;
inherit (lib.systems) parse;
inherit (lib.systems.inspect) predicates;
inherit (lib.attrsets) matchAttrs;
all = [
"aarch64-linux"
"armv5tel-linux" "armv6l-linux" "armv7l-linux"
"mipsel-linux"
"i686-cygwin" "i686-freebsd" "i686-linux" "i686-netbsd" "i686-openbsd"
"x86_64-cygwin" "x86_64-darwin" "x86_64-freebsd" "x86_64-linux"
"x86_64-netbsd" "x86_64-openbsd" "x86_64-solaris"
];
allParsed = map parse.mkSystemFromString all;
filterDoubles = f: map parse.doubleFromSystem (lists.filter f allParsed);
in rec {
inherit all;
none = [];
arm = filterDoubles predicates.isAarch32;
aarch64 = filterDoubles predicates.isAarch64;
x86 = filterDoubles predicates.isx86;
i686 = filterDoubles predicates.isi686;
x86_64 = filterDoubles predicates.isx86_64;
mips = filterDoubles predicates.isMips;
cygwin = filterDoubles predicates.isCygwin;
darwin = filterDoubles predicates.isDarwin;
freebsd = filterDoubles predicates.isFreeBSD;
# Should be better, but MinGW is unclear.
gnu = filterDoubles (matchAttrs { kernel = parse.kernels.linux; abi = parse.abis.gnu; });
illumos = filterDoubles predicates.isSunOS;
linux = filterDoubles predicates.isLinux;
netbsd = filterDoubles predicates.isNetBSD;
openbsd = filterDoubles predicates.isOpenBSD;
unix = filterDoubles predicates.isUnix;
mesaPlatforms = ["i686-linux" "x86_64-linux" "x86_64-darwin" "armv5tel-linux" "armv6l-linux" "armv7l-linux" "aarch64-linux" "powerpc64le-linux"];
}
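These double lists are the usual values for meta.platforms; a minimal sketch:

let lib = import <nixpkgs/lib>; in
{
  # A list of "cpu-os" strings such as "x86_64-linux"; availability
  # checks compare the build platform against this list.
  meta.platforms = lib.systems.doubles.unix;
}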

View File

@@ -1,165 +0,0 @@
# These can be passed to nixpkgs as either the `localSystem` or
# `crossSystem`. They are put here for user convenience, but also used by cross
# tests and linux cross stdenv building, so handle with care!
{ lib }:
let platforms = import ./platforms.nix { inherit lib; }; in
rec {
#
# Linux
#
powernv = {
config = "powerpc64le-unknown-linux-gnu";
platform = platforms.powernv;
};
musl-power = {
config = "powerpc64le-unknown-linux-musl";
platform = platforms.powernv;
};
sheevaplug = rec {
config = "armv5tel-unknown-linux-gnueabi";
platform = platforms.sheevaplug;
};
raspberryPi = rec {
config = "armv6l-unknown-linux-gnueabihf";
platform = platforms.raspberrypi;
};
armv7l-hf-multiplatform = rec {
config = "armv7a-unknown-linux-gnueabihf";
platform = platforms.armv7l-hf-multiplatform;
};
aarch64-multiplatform = rec {
config = "aarch64-unknown-linux-gnu";
platform = platforms.aarch64-multiplatform;
};
armv5te-android-prebuilt = rec {
config = "armv5tel-unknown-linux-androideabi";
sdkVer = "21";
ndkVer = "10e";
platform = platforms.armv5te-android;
useAndroidPrebuilt = true;
};
armv7a-android-prebuilt = rec {
config = "armv7a-unknown-linux-androideabi";
sdkVer = "24";
ndkVer = "17c";
platform = platforms.armv7a-android;
useAndroidPrebuilt = true;
};
aarch64-android-prebuilt = rec {
config = "aarch64-unknown-linux-android";
sdkVer = "24";
ndkVer = "17c";
platform = platforms.aarch64-multiplatform;
useAndroidPrebuilt = true;
};
scaleway-c1 = armv7l-hf-multiplatform // rec {
platform = platforms.scaleway-c1;
inherit (platform.gcc) fpu;
};
pogoplug4 = rec {
config = "armv5tel-unknown-linux-gnueabi";
platform = platforms.pogoplug4;
};
ben-nanonote = rec {
config = "mipsel-unknown-linux-uclibc";
platform = platforms.ben_nanonote;
};
fuloongminipc = rec {
config = "mipsel-unknown-linux-gnu";
platform = platforms.fuloong2f_n32;
};
muslpi = raspberryPi // {
config = "armv6l-unknown-linux-musleabihf";
};
aarch64-multiplatform-musl = aarch64-multiplatform // {
config = "aarch64-unknown-linux-musl";
};
musl64 = { config = "x86_64-unknown-linux-musl"; };
musl32 = { config = "i686-unknown-linux-musl"; };
riscv = bits: {
config = "riscv${bits}-unknown-linux-gnu";
platform = platforms.riscv-multiplatform bits;
};
riscv64 = riscv "64";
riscv32 = riscv "32";
#
# Darwin
#
iphone64 = {
config = "aarch64-apple-ios";
# config = "aarch64-apple-darwin14";
sdkVer = "10.2";
xcodeVer = "8.2";
xcodePlatform = "iPhoneOS";
useiOSPrebuilt = true;
platform = {};
};
iphone32 = {
config = "armv7a-apple-ios";
# config = "arm-apple-darwin10";
sdkVer = "10.2";
xcodeVer = "8.2";
xcodePlatform = "iPhoneOS";
useiOSPrebuilt = true;
platform = {};
};
iphone64-simulator = {
config = "x86_64-apple-ios";
# config = "x86_64-apple-darwin14";
sdkVer = "10.2";
xcodeVer = "8.2";
xcodePlatform = "iPhoneSimulator";
useiOSPrebuilt = true;
platform = {};
};
iphone32-simulator = {
config = "i686-apple-ios";
# config = "i386-apple-darwin11";
sdkVer = "10.2";
xcodeVer = "8.2";
xcodePlatform = "iPhoneSimulator";
useiOSPrebuilt = true;
platform = {};
};
#
# Windows
#
# 32 bit mingw-w64
mingw32 = {
config = "i686-pc-mingw32";
libc = "msvcrt"; # This distinguishes the mingw (non posix) toolchain
platform = {};
};
# 64 bit mingw-w64
mingwW64 = {
# That's the triplet they use in the mingw-w64 docs.
config = "x86_64-pc-mingw32";
libc = "msvcrt"; # This distinguishes the mingw (non posix) toolchain
platform = {};
};
}
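A sketch of cross-compiling with one of these presets (the package choice is arbitrary):

let
  lib  = import <nixpkgs/lib>;
  pkgs = import <nixpkgs> {
    crossSystem = lib.systems.examples.raspberryPi;
  };
in pkgs.hello   # built by a toolchain targeting armv6l-unknown-linux-gnueabihf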

View File

@@ -1,37 +0,0 @@
{ lib }:
let
inherit (lib.systems) parse;
inherit (lib.systems.inspect) patterns;
abis = lib.mapAttrs (_: abi: builtins.removeAttrs abi [ "assertions" ]) parse.abis;
in rec {
all = [ {} ]; # `{}` matches anything
none = [];
arm = [ patterns.isAarch32 ];
aarch64 = [ patterns.isAarch64 ];
x86 = [ patterns.isx86 ];
i686 = [ patterns.isi686 ];
x86_64 = [ patterns.isx86_64 ];
mips = [ patterns.isMips ];
riscv = [ patterns.isRiscV ];
cygwin = [ patterns.isCygwin ];
darwin = [ patterns.isDarwin ];
freebsd = [ patterns.isFreeBSD ];
# Should be better, but MinGW is unclear.
gnu = [
{ kernel = parse.kernels.linux; abi = abis.gnu; }
{ kernel = parse.kernels.linux; abi = abis.gnueabi; }
{ kernel = parse.kernels.linux; abi = abis.gnueabihf; }
];
illumos = [ patterns.isSunOS ];
linux = [ patterns.isLinux ];
netbsd = [ patterns.isNetBSD ];
openbsd = [ patterns.isOpenBSD ];
unix = patterns.isUnix; # Actually a list
windows = [ patterns.isWindows ];
inherit (lib.systems.doubles) mesaPlatforms;
}
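A sketch of these pattern lists in use; unlike the doubles above, each entry is an attrset that gets matched against a parsed platform (this assumes a nixpkgs of this era, where meta.platforms may contain such patterns):

let lib = import <nixpkgs/lib>; in
{
  # The package is considered available on any platform whose parsed
  # tuple matches one of the pattern attrsets in the list.
  meta.platforms = lib.systems.forMeta.linux;
}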

View File

@@ -1,59 +0,0 @@
{ lib }:
with import ./parse.nix { inherit lib; };
with lib.attrsets;
with lib.lists;
let abis_ = abis; in
let abis = lib.mapAttrs (_: abi: builtins.removeAttrs abi [ "assertions" ]) abis_; in
rec {
patterns = rec {
isi686 = { cpu = cpuTypes.i686; };
isx86_64 = { cpu = cpuTypes.x86_64; };
isPowerPC = { cpu = cpuTypes.powerpc; };
isPower = { cpu = { family = "power"; }; };
isx86 = { cpu = { family = "x86"; }; };
isAarch32 = { cpu = { family = "arm"; bits = 32; }; };
isAarch64 = { cpu = { family = "arm"; bits = 64; }; };
isMips = { cpu = { family = "mips"; }; };
isRiscV = { cpu = { family = "riscv"; }; };
isSparc = { cpu = { family = "sparc"; }; };
isWasm = { cpu = { family = "wasm"; }; };
is32bit = { cpu = { bits = 32; }; };
is64bit = { cpu = { bits = 64; }; };
isBigEndian = { cpu = { significantByte = significantBytes.bigEndian; }; };
isLittleEndian = { cpu = { significantByte = significantBytes.littleEndian; }; };
isBSD = { kernel = { families = { inherit (kernelFamilies) bsd; }; }; };
isDarwin = { kernel = { families = { inherit (kernelFamilies) darwin; }; }; };
isUnix = [ isBSD isDarwin isLinux isSunOS isCygwin ];
isMacOS = { kernel = kernels.macos; };
isiOS = { kernel = kernels.ios; };
isLinux = { kernel = kernels.linux; };
isSunOS = { kernel = kernels.solaris; };
isFreeBSD = { kernel = kernels.freebsd; };
isNetBSD = { kernel = kernels.netbsd; };
isOpenBSD = { kernel = kernels.openbsd; };
isWindows = { kernel = kernels.windows; };
isCygwin = { kernel = kernels.windows; abi = abis.cygnus; };
isMinGW = { kernel = kernels.windows; abi = abis.gnu; };
isAndroid = [ { abi = abis.android; } { abi = abis.androideabi; } ];
isMusl = with abis; map (a: { abi = a; }) [ musl musleabi musleabihf ];
isUClibc = with abis; map (a: { abi = a; }) [ uclibc uclibceabi uclibceabihf ];
isEfi = map (family: { cpu.family = family; })
[ "x86" "arm" "aarch64" ];
# Deprecated after 18.03
isArm = isAarch32;
};
matchAnyAttrs = patterns:
if builtins.isList patterns then attrs: any (pattern: matchAttrs pattern attrs) patterns
else matchAttrs patterns;
predicates = mapAttrs (_: matchAnyAttrs) patterns;
}
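The predicates here are what surface as the familiar isLinux/is64bit/... booleans on an elaborated platform (see the mapAttrs over inspect.predicates in systems/default.nix above); a consumption sketch with an illustrative flag name:

let
  pkgs = import <nixpkgs> { };
  inherit (pkgs.stdenv) hostPlatform;
in {
  configureFlags = pkgs.lib.optional hostPlatform.isLinux "--enable-epoll";
  wide = hostPlatform.is64bit;
}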

View File

@@ -1,327 +0,0 @@
# Define the list of systems with their properties.
#
# See https://clang.llvm.org/docs/CrossCompilation.html and
# http://llvm.org/docs/doxygen/html/Triple_8cpp_source.html especially
# Triple::normalize. Parsing should essentially act as a more conservative
# version of that last function.
#
# Most of the types below come in "open" and "closed" pairs. The open ones
# specify what information we need to know about systems in general, and the
# closed ones are sub-types representing the whitelist of systems we support in
# practice.
#
# Code in the remainder of nixpkgs shouldn't rely on the closed ones in
# e.g. exhaustive cases. It's more a sanity check to make sure nobody defines
# systems that overlap with existing ones and won't notice something amiss.
#
{ lib }:
with lib.lists;
with lib.types;
with lib.attrsets;
with lib.strings;
with (import ./inspect.nix { inherit lib; }).predicates;
let
inherit (lib.options) mergeOneOption;
setTypes = type:
mapAttrs (name: value:
assert type.check value;
setType type.name ({ inherit name; } // value));
in
rec {
################################################################################
types.openSignificantByte = mkOptionType {
name = "significant-byte";
description = "Endianness";
merge = mergeOneOption;
};
types.significantByte = enum (attrValues significantBytes);
significantBytes = setTypes types.openSignificantByte {
bigEndian = {};
littleEndian = {};
};
################################################################################
# Reasonable power of 2
types.bitWidth = enum [ 8 16 32 64 128 ];
################################################################################
types.openCpuType = mkOptionType {
name = "cpu-type";
description = "instruction set architecture name and information";
merge = mergeOneOption;
check = x: types.bitWidth.check x.bits
&& (if 8 < x.bits
then types.significantByte.check x.significantByte
else !(x ? significantByte));
};
types.cpuType = enum (attrValues cpuTypes);
cpuTypes = with significantBytes; setTypes types.openCpuType {
arm = { bits = 32; significantByte = littleEndian; family = "arm"; };
armv5tel = { bits = 32; significantByte = littleEndian; family = "arm"; version = "5"; };
armv6m = { bits = 32; significantByte = littleEndian; family = "arm"; version = "6"; };
armv6l = { bits = 32; significantByte = littleEndian; family = "arm"; version = "6"; };
armv7a = { bits = 32; significantByte = littleEndian; family = "arm"; version = "7"; };
armv7r = { bits = 32; significantByte = littleEndian; family = "arm"; version = "7"; };
armv7m = { bits = 32; significantByte = littleEndian; family = "arm"; version = "7"; };
armv7l = { bits = 32; significantByte = littleEndian; family = "arm"; version = "7"; };
armv8a = { bits = 32; significantByte = littleEndian; family = "arm"; version = "8"; };
armv8r = { bits = 32; significantByte = littleEndian; family = "arm"; version = "8"; };
armv8m = { bits = 32; significantByte = littleEndian; family = "arm"; version = "8"; };
aarch64 = { bits = 64; significantByte = littleEndian; family = "arm"; version = "8"; };
i686 = { bits = 32; significantByte = littleEndian; family = "x86"; };
x86_64 = { bits = 64; significantByte = littleEndian; family = "x86"; };
mips = { bits = 32; significantByte = bigEndian; family = "mips"; };
mipsel = { bits = 32; significantByte = littleEndian; family = "mips"; };
mips64 = { bits = 64; significantByte = bigEndian; family = "mips"; };
mips64el = { bits = 64; significantByte = littleEndian; family = "mips"; };
powerpc = { bits = 32; significantByte = bigEndian; family = "power"; };
powerpc64 = { bits = 64; significantByte = bigEndian; family = "power"; };
powerpc64le = { bits = 64; significantByte = littleEndian; family = "power"; };
riscv32 = { bits = 32; significantByte = littleEndian; family = "riscv"; };
riscv64 = { bits = 64; significantByte = littleEndian; family = "riscv"; };
sparc = { bits = 32; significantByte = bigEndian; family = "sparc"; };
sparc64 = { bits = 64; significantByte = bigEndian; family = "sparc"; };
wasm32 = { bits = 32; significantByte = littleEndian; family = "wasm"; };
wasm64 = { bits = 64; significantByte = littleEndian; family = "wasm"; };
};
################################################################################
types.openVendor = mkOptionType {
name = "vendor";
description = "vendor for the platform";
merge = mergeOneOption;
};
types.vendor = enum (attrValues vendors);
vendors = setTypes types.openVendor {
apple = {};
pc = {};
unknown = {};
};
################################################################################
types.openExecFormat = mkOptionType {
name = "exec-format";
description = "executable container used by the kernel";
merge = mergeOneOption;
};
types.execFormat = enum (attrValues execFormats);
execFormats = setTypes types.openExecFormat {
aout = {}; # a.out
elf = {};
macho = {};
pe = {};
unknown = {};
};
################################################################################
types.openKernelFamily = mkOptionType {
name = "exec-format";
description = "executable container used by the kernel";
merge = mergeOneOption;
};
types.kernelFamily = enum (attrValues kernelFamilies);
kernelFamilies = setTypes types.openKernelFamily {
bsd = {};
darwin = {};
};
################################################################################
types.openKernel = mkOptionType {
name = "kernel";
description = "kernel name and information";
merge = mergeOneOption;
check = x: types.execFormat.check x.execFormat
&& all types.kernelFamily.check (attrValues x.families);
};
types.kernel = enum (attrValues kernels);
kernels = with execFormats; with kernelFamilies; setTypes types.openKernel {
# TODO(@Ericson2314): Don't want to mass-rebuild yet, so keeping 'darwin' as
# the normalized name for macOS.
macos = { execFormat = macho; families = { inherit darwin; }; name = "darwin"; };
ios = { execFormat = macho; families = { inherit darwin; }; };
freebsd = { execFormat = elf; families = { inherit bsd; }; };
linux = { execFormat = elf; families = { }; };
netbsd = { execFormat = elf; families = { inherit bsd; }; };
none = { execFormat = unknown; families = { }; };
openbsd = { execFormat = elf; families = { inherit bsd; }; };
solaris = { execFormat = elf; families = { }; };
windows = { execFormat = pe; families = { }; };
} // { # aliases
# 'darwin' is the kernel for all of them. We choose macOS by default.
darwin = kernels.macos;
watchos = kernels.ios;
tvos = kernels.ios;
win32 = kernels.windows;
};
################################################################################
types.openAbi = mkOptionType {
name = "abi";
description = "binary interface for compiled code and syscalls";
merge = mergeOneOption;
};
types.abi = enum (attrValues abis);
abis = setTypes types.openAbi {
cygnus = {};
msvc = {};
eabi = {};
androideabi = {};
android = {
assertions = [
{ assertion = platform: !platform.isAarch32;
message = ''
The "android" ABI is not for 32-bit ARM. Use "androideabi" instead.
'';
}
];
};
gnueabi = { float = "soft"; };
gnueabihf = { float = "hard"; };
gnu = {
assertions = [
{ assertion = platform: !platform.isAarch32;
message = ''
The "gnu" ABI is ambiguous on 32-bit ARM. Use "gnueabi" or "gnueabihf" instead.
'';
}
];
};
musleabi = { float = "soft"; };
musleabihf = { float = "hard"; };
musl = {};
uclibceabihf = { float = "soft"; };
uclibceabi = { float = "hard"; };
uclibc = {};
unknown = {};
};
################################################################################
types.parsedPlatform = mkOptionType {
name = "system";
description = "fully parsed representation of llvm- or nix-style platform tuple";
merge = mergeOneOption;
check = { cpu, vendor, kernel, abi }:
types.cpuType.check cpu
&& types.vendor.check vendor
&& types.kernel.check kernel
&& types.abi.check abi;
};
isSystem = isType "system";
mkSystem = components:
assert types.parsedPlatform.check components;
setType "system" components;
mkSkeletonFromList = l: {
"2" = # We only do 2-part hacks for things Nix already supports
if elemAt l 1 == "cygwin"
then { cpu = elemAt l 0; kernel = "windows"; abi = "cygnus"; }
else { cpu = elemAt l 0; kernel = elemAt l 1; };
"3" = # Awkwards hacks, beware!
if elemAt l 1 == "apple"
then { cpu = elemAt l 0; vendor = "apple"; kernel = elemAt l 2; }
else if (elemAt l 1 == "linux") || (elemAt l 2 == "gnu")
then { cpu = elemAt l 0; kernel = elemAt l 1; abi = elemAt l 2; }
else if (elemAt l 2 == "mingw32") # autotools breaks on -gnu for window
then { cpu = elemAt l 0; vendor = elemAt l 1; kernel = "windows"; abi = "gnu"; }
else if hasPrefix "netbsd" (elemAt l 2)
then { cpu = elemAt l 0; vendor = elemAt l 1; kernel = elemAt l 2; }
else throw "Target specification with 3 components is ambiguous";
"4" = { cpu = elemAt l 0; vendor = elemAt l 1; kernel = elemAt l 2; abi = elemAt l 3; };
}.${toString (length l)}
or (throw "system string has invalid number of hyphen-separated components");
# This should reverse the job done by config.guess from the gcc compiler.
mkSystemFromSkeleton = { cpu
, # Optional, but fallback too complex for here.
# Inferred below instead.
vendor ? assert false; null
, kernel
, # Also inferred below
abi ? assert false; null
} @ args: let
getCpu = name: cpuTypes.${name} or (throw "Unknown CPU type: ${name}");
getVendor = name: vendors.${name} or (throw "Unknown vendor: ${name}");
getKernel = name: kernels.${name} or (throw "Unknown kernel: ${name}");
getAbi = name: abis.${name} or (throw "Unknown ABI: ${name}");
parsed = rec {
cpu = getCpu args.cpu;
vendor =
/**/ if args ? vendor then getVendor args.vendor
else if isDarwin parsed then vendors.apple
else if isWindows parsed then vendors.pc
else vendors.unknown;
kernel = if hasPrefix "darwin" args.kernel then getKernel "darwin"
else if hasPrefix "netbsd" args.kernel then getKernel "netbsd"
else getKernel args.kernel;
abi =
/**/ if args ? abi then getAbi args.abi
else if isLinux parsed then
if isAarch32 parsed then
if lib.versionAtLeast (parsed.cpu.version or "0") "6"
then abis.gnueabihf
else abis.gnueabi
else abis.gnu
else if isWindows parsed then abis.gnu
else abis.unknown;
};
in mkSystem parsed;
mkSystemFromString = s: mkSystemFromSkeleton (mkSkeletonFromList (lib.splitString "-" s));
doubleFromSystem = { cpu, vendor, kernel, abi, ... }:
/**/ if abi == abis.cygnus then "${cpu.name}-cygwin"
else if kernel.families ? darwin then "${cpu.name}-darwin"
else "${cpu.name}-${kernel.name}";
tripleFromSystem = { cpu, vendor, kernel, abi, ... } @ sys: assert isSystem sys; let
optAbi = lib.optionalString (abi != abis.unknown) "-${abi.name}";
in "${cpu.name}-${vendor.name}-${kernel.name}${optAbi}";
################################################################################
}
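A round-trip sketch through this parser, using only the attributes defined above:

let
  lib = import <nixpkgs/lib>;
  inherit (lib.systems) parse;
  sys = parse.mkSystemFromString "aarch64-unknown-linux-gnu";
in {
  family = sys.cpu.family;               # "arm"
  double = parse.doubleFromSystem sys;   # "aarch64-linux"
  triple = parse.tripleFromSystem sys;   # "aarch64-unknown-linux-gnu"
}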

Some files were not shown because too many files have changed in this diff.