treewide: format all inactive Nix files
After final improvements to the official formatter implementation, this commit now performs the first treewide reformat of Nix files using it. This is part of the implementation of RFC 166.

Only "inactive" files are reformatted, meaning only files that aren't being touched by any PR with activity in the past 2 months. This is to avoid conflicts for PRs that might soon be merged. Later we can do a full treewide reformat to get the rest, which should not cause as many conflicts.

A CI check has already been running for some time to ensure that new and already-formatted files are formatted, so the files being reformatted here should also stay formatted.

This commit was automatically created and can be verified using:

    nix-build a08b3a4d19.tar.gz \
      --argstr baseRev b32a094368
    result/bin/apply-formatting $NIXPKGS_PATH
This commit is contained in:
parent b32a094368
commit 4f0dadbf38
@@ -1,4 +1,6 @@
-let requiredVersion = import ./lib/minver.nix; in
+let
+requiredVersion = import ./lib/minver.nix;
+in
 
 if !builtins ? nixVersion || builtins.compareVersions requiredVersion builtins.nixVersion == 1 then
 
@@ -1,65 +1,72 @@
-{ nixpkgsPath, revision, libsetsJSON }:
+{
+nixpkgsPath,
+revision,
+libsetsJSON,
+}:
 let
 lib = import (nixpkgsPath + "/lib");
 libsets = builtins.fromJSON libsetsJSON;
 
-libDefPos = prefix: set:
-builtins.concatMap
-(name: [{
+libDefPos =
+prefix: set:
+builtins.concatMap (
+name:
+[
+{
 name = builtins.concatStringsSep "." (prefix ++ [ name ]);
 location = builtins.unsafeGetAttrPos name set;
-}] ++ lib.optionals
-(builtins.length prefix == 0 && builtins.isAttrs set.${name})
-(libDefPos (prefix ++ [name]) set.${name})
+}
+]
+++ lib.optionals (builtins.length prefix == 0 && builtins.isAttrs set.${name}) (
+libDefPos (prefix ++ [ name ]) set.${name}
+)
 ) (builtins.attrNames set);
 
-libset = toplib:
-builtins.map
-(subsetname: {
+libset =
+toplib:
+builtins.map (subsetname: {
 subsetname = subsetname;
 functions = libDefPos [ ] toplib.${subsetname};
-})
-(builtins.map (x: x.name) libsets);
+}) (builtins.map (x: x.name) libsets);
 
-flattenedLibSubset = { subsetname, functions }:
-builtins.map
-(fn: {
+flattenedLibSubset =
+{ subsetname, functions }:
+builtins.map (fn: {
 name = "lib.${subsetname}.${fn.name}";
 value = fn.location;
-})
-functions;
+}) functions;
 
 locatedlibsets = libs: builtins.map flattenedLibSubset (libset libs);
-removeFilenamePrefix = prefix: filename:
+removeFilenamePrefix =
+prefix: filename:
 let
 prefixLen = (builtins.stringLength prefix) + 1; # +1 to remove the leading /
 filenameLen = builtins.stringLength filename;
 substr = builtins.substring prefixLen filenameLen filename;
-in substr;
+in
+substr;
 
 removeNixpkgs = removeFilenamePrefix (builtins.toString nixpkgsPath);
 
-liblocations =
-builtins.filter
-(elem: elem.value != null)
-(lib.lists.flatten
-(locatedlibsets lib));
+liblocations = builtins.filter (elem: elem.value != null) (lib.lists.flatten (locatedlibsets lib));
 
-fnLocationRelative = { name, value }:
+fnLocationRelative =
+{ name, value }:
 {
 inherit name;
-value = value // { file = removeNixpkgs value.file; };
+value = value // {
+file = removeNixpkgs value.file;
+};
 };
 
 relativeLocs = (builtins.map fnLocationRelative liblocations);
-sanitizeId = builtins.replaceStrings
-[ "'" ]
-[ "-prime" ];
+sanitizeId = builtins.replaceStrings [ "'" ] [ "-prime" ];
 
 urlPrefix = "https://github.com/NixOS/nixpkgs/blob/${revision}";
-jsonLocs = builtins.listToAttrs
-(builtins.map
-({ name, value }: {
+jsonLocs = builtins.listToAttrs (
+builtins.map (
+{ name, value }:
+{
 name = sanitizeId name;
 value =
 let
@@ -67,8 +74,9 @@ let
 target = "${urlPrefix}/${value.file}#L${builtins.toString value.line}";
 in
 "[${text}](${target}) in `<nixpkgs>`";
-})
-relativeLocs);
+}
+) relativeLocs
+);
 
 in
 jsonLocs
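As context for the reformatted file above: it builds an attribute set mapping sanitized `lib` function names to Markdown links pointing at their definitions. A rough sketch of the shape of one entry, based only on the expressions visible in this hunk (the function name, file, and line number are made up for illustration, and the link text is elided):

    {
      # sanitizeId replaces ' with -prime in the attribute name
      "lib.foo.bar" = "[…](https://github.com/NixOS/nixpkgs/blob/<revision>/lib/foo.nix#L1) in `<nixpkgs>`";
    }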
@@ -1,4 +1,5 @@
-{ "\t" = 9;
+{
+"\t" = 9;
 "\n" = 10;
 "\r" = 13;
 " " = 32;
@@ -36,10 +36,7 @@ rec {
 :::
 */
 # TODO(Profpatsch): add tests that check stderr
-assertMsg =
-pred:
-msg:
-pred || builtins.throw msg;
+assertMsg = pred: msg: pred || builtins.throw msg;
 
 /**
 Specialized `assertMsg` for checking if `val` is one of the elements
@@ -81,14 +78,10 @@ rec {
 :::
 */
 assertOneOf =
-name:
-val:
-xs:
-assertMsg
-(lib.elem val xs)
-"${name} must be one of ${
-lib.generators.toPretty {} xs}, but is: ${
-lib.generators.toPretty {} val}";
+name: val: xs:
+assertMsg (lib.elem val xs) "${name} must be one of ${lib.generators.toPretty { } xs}, but is: ${
+lib.generators.toPretty { } val
+}";
 
 /**
 Specialized `assertMsg` for checking if every one of `vals` is one of the elements
@@ -133,12 +126,9 @@ rec {
 :::
 */
 assertEachOneOf =
-name:
-vals:
-xs:
-assertMsg
-(lib.all (val: lib.elem val xs) vals)
-"each element in ${name} must be one of ${
-lib.generators.toPretty {} xs}, but is: ${
-lib.generators.toPretty {} vals}";
+name: vals: xs:
+assertMsg (lib.all (val: lib.elem val xs) vals)
+"each element in ${name} must be one of ${lib.generators.toPretty { } xs}, but is: ${
+lib.generators.toPretty { } vals
+}";
 }
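The `assertMsg`/`assertOneOf`/`assertEachOneOf` definitions above only change layout, not behaviour. A minimal usage sketch (assuming the conventional `lib.asserts` attribute path and a `<nixpkgs>` channel on NIX_PATH):

    let lib = import <nixpkgs/lib>; in
    # Both asserts pass, so the expression evaluates to "ok";
    # a failing predicate would abort evaluation with the given message.
    assert lib.asserts.assertMsg (1 + 1 == 2) "arithmetic is broken";
    assert lib.asserts.assertOneOf "format" "json" [ "json" "toml" ];
    "ok"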
lib/cli.nix | 40
@@ -9,7 +9,6 @@ rec {
 
 `toGNUCommandLineShell` returns an escaped shell string.
 
-
 # Inputs
 
 `options`
@@ -20,7 +19,6 @@ rec {
 
 : The attributes to transform into arguments.
 
-
 # Examples
 :::{.example}
 ## `lib.cli.toGNUCommandLineShell` usage example
@@ -40,15 +38,13 @@ rec {
 
 :::
 */
-toGNUCommandLineShell =
-options: attrs: lib.escapeShellArgs (toGNUCommandLine options attrs);
+toGNUCommandLineShell = options: attrs: lib.escapeShellArgs (toGNUCommandLine options attrs);
 
 /**
 Automatically convert an attribute set to a list of command-line options.
 
 `toGNUCommandLine` returns a list of string arguments.
 
-
 # Inputs
 
 `options`
@@ -76,7 +72,6 @@ rec {
 : How to format a list value to a command list;
 By default the option name is repeated for each value and `mkOption` is applied to the values themselves.
 
-
 `mkOption`
 
 : How to format any remaining value to a command list;
@@ -89,7 +84,6 @@ rec {
 By default, there is no separator, so option `-c` and value `5` would become ["-c" "5"].
 This is useful if the command requires equals, for example, `-c=5`.
 
-
 # Examples
 :::{.example}
 ## `lib.cli.toGNUCommandLine` usage example
@@ -116,32 +110,38 @@ rec {
 
 :::
 */
-toGNUCommandLine = {
-mkOptionName ?
-k: if builtins.stringLength k == 1
-then "-${k}"
-else "--${k}",
+toGNUCommandLine =
+{
+mkOptionName ? k: if builtins.stringLength k == 1 then "-${k}" else "--${k}",
 
 mkBool ? k: v: lib.optional v (mkOptionName k),
 
 mkList ? k: v: lib.concatMap (mkOption k) v,
 
 mkOption ?
-k: v: if v == null
-then []
+k: v:
+if v == null then
+[ ]
 else if optionValueSeparator == null then
-[ (mkOptionName k) (lib.generators.mkValueStringDefault {} v) ]
+[
+(mkOptionName k)
+(lib.generators.mkValueStringDefault { } v)
+]
 else
 [ "${mkOptionName k}${optionValueSeparator}${lib.generators.mkValueStringDefault { } v}" ],
 
-optionValueSeparator ? null
+optionValueSeparator ? null,
 }:
 options:
 let
-render = k: v:
-if builtins.isBool v then mkBool k v
-else if builtins.isList v then mkList k v
-else mkOption k v;
+render =
+k: v:
+if builtins.isBool v then
+mkBool k v
+else if builtins.isList v then
+mkList k v
+else
+mkOption k v;
 
 in
 builtins.concatLists (lib.mapAttrsToList render options);
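For reference, the reformatted `toGNUCommandLine` still converts an attribute set into GNU-style arguments. A sketch of typical output (assuming the usual `lib.cli` attribute path; the attribute values are illustrative):

    let lib = import <nixpkgs/lib>; in
    lib.cli.toGNUCommandLine { } {
      X = "PUT";                       # single-character names become short options
      retry = 3;                       # scalar values go through mkOption
      url = [ "https://example.org" ]; # lists repeat the option per element
      verbose = true;                  # true booleans become bare flags
    }
    # expected result (attributes are processed in alphabetical order):
    # [ "-X" "PUT" "--retry" "3" "--url" "https://example.org" "--verbose" ]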
lib/debug.nix | 124
@@ -26,7 +26,8 @@ let
 generators
 id
 mapAttrs
-trace;
+trace
+;
 in
 
 rec {
@@ -36,7 +37,6 @@ rec {
 /**
 Conditionally trace the supplied message, based on a predicate.
 
-
 # Inputs
 
 `pred`
@@ -70,15 +70,13 @@ rec {
 :::
 */
 traceIf =
-pred:
-msg:
-x: if pred then trace msg x else x;
+pred: msg: x:
+if pred then trace msg x else x;
 
 /**
 Trace the supplied value after applying a function to it, and
 return the original value.
 
-
 # Inputs
 
 `f`
@@ -107,9 +105,7 @@ rec {
 
 :::
 */
-traceValFn =
-f:
-x: trace (f x) x;
+traceValFn = f: x: trace (f x) x;
 
 /**
 Trace the supplied value and return it.
@@ -143,7 +139,6 @@ rec {
 /**
 `builtins.trace`, but the value is `builtins.deepSeq`ed first.
 
-
 # Inputs
 
 `x`
@@ -175,16 +170,13 @@ rec {
 
 :::
 */
-traceSeq =
-x:
-y: trace (builtins.deepSeq x x) y;
+traceSeq = x: y: trace (builtins.deepSeq x x) y;
 
 /**
 Like `traceSeq`, but only evaluate down to depth n.
 This is very useful because lots of `traceSeq` usages
 lead to an infinite recursion.
 
-
 # Inputs
 
 `depth`
@@ -217,25 +209,39 @@ rec {
 
 :::
 */
-traceSeqN = depth: x: y:
-let snip = v: if isList v then noQuotes "[…]" v
-else if isAttrs v then noQuotes "{…}" v
-else v;
-noQuotes = str: v: { __pretty = const str; val = v; };
-modify = n: fn: v: if (n == 0) then fn v
-else if isList v then map (modify (n - 1) fn) v
-else if isAttrs v then mapAttrs
-(const (modify (n - 1) fn)) v
-else v;
-in trace (generators.toPretty { allowPrettyValues = true; }
-(modify depth snip x)) y;
+traceSeqN =
+depth: x: y:
+let
+snip =
+v:
+if isList v then
+noQuotes "[…]" v
+else if isAttrs v then
+noQuotes "{…}" v
+else
+v;
+noQuotes = str: v: {
+__pretty = const str;
+val = v;
+};
+modify =
+n: fn: v:
+if (n == 0) then
+fn v
+else if isList v then
+map (modify (n - 1) fn) v
+else if isAttrs v then
+mapAttrs (const (modify (n - 1) fn)) v
+else
+v;
+in
+trace (generators.toPretty { allowPrettyValues = true; } (modify depth snip x)) y;
 
 /**
 A combination of `traceVal` and `traceSeq` that applies a
 provided function to the value to be traced after `deepSeq`ing
 it.
 
-
 # Inputs
 
 `f`
@@ -246,9 +252,7 @@ rec {
 
 : Value to trace
 */
-traceValSeqFn =
-f:
-v: traceValFn f (builtins.deepSeq v v);
+traceValSeqFn = f: v: traceValFn f (builtins.deepSeq v v);
 
 /**
 A combination of `traceVal` and `traceSeq`.
@@ -258,7 +262,6 @@ rec {
 `v`
 
 : Value to trace
-
 */
 traceValSeq = traceValSeqFn id;
 
@@ -266,7 +269,6 @@ rec {
 A combination of `traceVal` and `traceSeqN` that applies a
 provided function to the value to be traced.
 
-
 # Inputs
 
 `f`
@@ -282,9 +284,8 @@ rec {
 : Value to trace
 */
 traceValSeqNFn =
-f:
-depth:
-v: traceSeqN depth (f v) v;
+f: depth: v:
+traceSeqN depth (f v) v;
 
 /**
 A combination of `traceVal` and `traceSeqN`.
@@ -308,7 +309,6 @@ rec {
 This is useful for adding around a function call,
 to see the before/after of values as they are transformed.
 
-
 # Inputs
 
 `depth`
@@ -327,7 +327,6 @@ rec {
 
 : 4\. Function argument
 
-
 # Examples
 :::{.example}
 ## `lib.debug.traceFnSeqN` usage example
@@ -340,17 +339,16 @@ rec {
 
 :::
 */
-traceFnSeqN = depth: name: f: v:
-let res = f v;
-in lib.traceSeqN
-(depth + 1)
-{
+traceFnSeqN =
+depth: name: f: v:
+let
+res = f v;
+in
+lib.traceSeqN (depth + 1) {
 fn = name;
 from = v;
 to = res;
-}
-res;
+} res;
 
-
 # -- TESTING --
 
@@ -375,7 +373,6 @@ rec {
 
 - If you want to run only a subset of the tests add the attribute `tests = ["testName"];`
 
-
 # Inputs
 
 `tests`
@@ -430,26 +427,42 @@ rec {
 :::
 */
 runTests =
-tests: concatLists (attrValues (mapAttrs (name: test:
-let testsToRun = if tests ? tests then tests.tests else [];
-in if (substring 0 4 name == "test" || elem name testsToRun)
+tests:
+concatLists (
+attrValues (
+mapAttrs (
+name: test:
+let
+testsToRun = if tests ? tests then tests.tests else [ ];
+in
+if
+(substring 0 4 name == "test" || elem name testsToRun)
 && ((testsToRun == [ ]) || elem name tests.tests)
 && (test.expr != test.expected)
 
-then [ { inherit name; expected = test.expected; result = test.expr; } ]
-else [] ) tests));
+then
+[
+{
+inherit name;
+expected = test.expected;
+result = test.expr;
+}
+]
+else
+[ ]
+) tests
+)
+);
 
 /**
 Create a test assuming that list elements are `true`.
 
-
 # Inputs
 
 `expr`
 
 : 1\. Function argument
 
-
 # Examples
 :::{.example}
 ## `lib.debug.testAllTrue` usage example
@@ -460,5 +473,8 @@ rec {
 
 :::
 */
-testAllTrue = expr: { inherit expr; expected = map (x: true) expr; };
+testAllTrue = expr: {
+inherit expr;
+expected = map (x: true) expr;
+};
 }
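The `lib.debug` helpers reformatted above keep their semantics; for example (a sketch assuming the usual `lib.debug` attribute path):

    let lib = import <nixpkgs/lib>; in
    # Prints "trace: x = 2" to stderr and returns 2 unchanged.
    lib.debug.traceValFn (v: "x = ${toString v}") (1 + 1)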
@@ -9,14 +9,16 @@ let
 throwIfNot
 ;
 
-showMaybeAttrPosPre = prefix: attrName: v:
-let pos = builtins.unsafeGetAttrPos attrName v;
-in if pos == null then "" else "${prefix}${pos.file}:${toString pos.line}:${toString pos.column}";
+showMaybeAttrPosPre =
+prefix: attrName: v:
+let
+pos = builtins.unsafeGetAttrPos attrName v;
+in
+if pos == null then "" else "${prefix}${pos.file}:${toString pos.line}:${toString pos.column}";
 
-showMaybePackagePosPre = prefix: pkg:
-if pkg?meta.position && isString pkg.meta.position
-then "${prefix}${pkg.meta.position}"
-else "";
+showMaybePackagePosPre =
+prefix: pkg:
+if pkg ? meta.position && isString pkg.meta.position then "${prefix}${pkg.meta.position}" else "";
 in
 {
 /**
@@ -94,15 +96,14 @@ in
 derivation,
 meta ? null,
 passthru ? { },
-outputs ? [ "out" ]
+outputs ? [ "out" ],
 }:
 let
 # These checks are strict in `drv` and some `drv` attributes, but the
 # attrset spine returned by lazyDerivation does not depend on it.
 # Instead, the individual derivation attributes do depend on it.
 checked =
-throwIfNot (derivation.type or null == "derivation")
-"lazyDerivation: input must be a derivation."
+throwIfNot (derivation.type or null == "derivation") "lazyDerivation: input must be a derivation."
 throwIfNot
 # NOTE: Technically we could require our outputs to be a subset of the
 # actual ones, or even leave them unchecked and fail on a lazy basis.
@@ -152,7 +153,13 @@ in
 # A fixed set of derivation values, so that `lazyDerivation` can return
 # its attrset before evaluating `derivation`.
 # This must only list attributes that are available on _all_ derivations.
-inherit (checked) outPath outputName drvPath name system;
+inherit (checked)
+outPath
+outputName
+drvPath
+name
+system
+;
 inherit outputs;
 
 # The meta attribute can either be taken from the derivation, or if the
@@ -170,7 +177,6 @@ in
 Thus, this function passes through its `value` argument if the `cond`
 is `true`, but returns `null` if not.
 
-
 # Inputs
 
 `cond`
@@ -205,9 +211,7 @@ in
 
 :::
 */
-optionalDrvAttr =
-cond:
-value: if cond then value else null;
+optionalDrvAttr = cond: value: if cond then value else null;
 
 /**
 Wrap a derivation such that instantiating it produces a warning.
@@ -238,8 +242,11 @@ in
 warnOnInstantiate =
 msg: drv:
 let
-drvToWrap = removeAttrs drv [ "meta" "name" "type" ];
+drvToWrap = removeAttrs drv [
+"meta"
+"name"
+"type"
+];
 in
-drv
-// mapAttrs (_: lib.warn msg) drvToWrap;
+drv // mapAttrs (_: lib.warn msg) drvToWrap;
 }
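As context for the hunk above, `optionalDrvAttr` simply gates a value on a condition. A self-contained sketch using the definition shown here (the attribute names are illustrative):

    let optionalDrvAttr = cond: value: if cond then value else null; in
    {
      rev = optionalDrvAttr true "abc123";            # kept as "abc123"
      separateDebugInfo = optionalDrvAttr false true; # becomes null when the condition is false
    }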
@@ -11,15 +11,24 @@ let
 sha256 = lib.fakeSha256;
 sha512 = lib.fakeSha512;
 };
-in rec {
+in
+rec {
 
 proxyImpureEnvVars = [
 # We borrow these environment variables from the caller to allow
 # easy proxy configuration. This is impure, but a fixed-output
 # derivation like fetchurl is allowed to do so since its result is
 # by definition pure.
-"http_proxy" "https_proxy" "ftp_proxy" "all_proxy" "no_proxy"
-"HTTP_PROXY" "HTTPS_PROXY" "FTP_PROXY" "ALL_PROXY" "NO_PROXY"
+"http_proxy"
+"https_proxy"
+"ftp_proxy"
+"all_proxy"
+"no_proxy"
+"HTTP_PROXY"
+"HTTPS_PROXY"
+"FTP_PROXY"
+"ALL_PROXY"
+"NO_PROXY"
 
 # https proxies typically need to inject custom root CAs too
 "NIX_SSL_CERT_FILE"
@@ -77,13 +86,24 @@ in rec {
 required
 : whether to throw if no hash was present in the input; otherwise returns the original input, unmodified
 */
-normalizeHash = {
+normalizeHash =
+{
 hashTypes ? [ "sha256" ],
 required ? true,
 }:
 let
-inherit (lib) concatMapStringsSep head tail throwIf;
-inherit (lib.attrsets) attrsToList intersectAttrs removeAttrs optionalAttrs;
+inherit (lib)
+concatMapStringsSep
+head
+tail
+throwIf
+;
+inherit (lib.attrsets)
+attrsToList
+intersectAttrs
+removeAttrs
+optionalAttrs
+;
 
 inherit (commonH hashTypes) hashNames hashSet;
 in
@@ -95,24 +115,27 @@ in rec {
 # The argument hash, as a {name, value} pair
 h =
 # All hashes passed in arguments (possibly 0 or >1) as a list of {name, value} pairs
-let hashesAsNVPairs = attrsToList (intersectAttrs hashSet args); in
+let
+hashesAsNVPairs = attrsToList (intersectAttrs hashSet args);
+in
 if hashesAsNVPairs == [ ] then
 throwIf required "fetcher called without `hash`" null
 else if tail hashesAsNVPairs != [ ] then
-throw "fetcher called with mutually-incompatible arguments: ${concatMapStringsSep ", " (a: a.name) hashesAsNVPairs}"
+throw "fetcher called with mutually-incompatible arguments: ${
+concatMapStringsSep ", " (a: a.name) hashesAsNVPairs
+}"
 else
-head hashesAsNVPairs
-;
+head hashesAsNVPairs;
 in
-removeAttrs args hashNames // (optionalAttrs (h != null) {
+removeAttrs args hashNames
+// (optionalAttrs (h != null) {
 outputHashAlgo = if h.name == "hash" then null else h.name;
 outputHash =
 if h.value == "" then
 fakeH.${h.name} or (throw "no “fake hash” defined for ${h.name}")
 else
 h.value;
-})
-;
+});
 
 /**
 Wraps a function which accepts `outputHash{,Algo}` into one which accepts `hash` or `sha{256,512}`
@@ -164,9 +187,11 @@ in rec {
 However, `withNormalizedHash` preserves `functionArgs` metadata insofar as possible,
 and is implemented somewhat more efficiently.
 */
-withNormalizedHash = {
-hashTypes ? [ "sha256" ]
-}: fetcher:
+withNormalizedHash =
+{
+hashTypes ? [ "sha256" ],
+}:
+fetcher:
 let
 inherit (lib.attrsets) genAttrs intersectAttrs removeAttrs;
 inherit (lib.trivial) const functionArgs setFunctionArgs;
@@ -183,7 +208,13 @@ in rec {
 assert fArgs ? outputHash && fArgs ? outputHashAlgo;
 assert intersectAttrs fArgs hashSet == { };
 
-setFunctionArgs
-(args: fetcher (normalize args))
-(removeAttrs fArgs [ "outputHash" "outputHashAlgo" ] // { hash = fArgs.outputHash; });
+setFunctionArgs (args: fetcher (normalize args)) (
+removeAttrs fArgs [
+"outputHash"
+"outputHashAlgo"
+]
+// {
+hash = fArgs.outputHash;
+}
+);
 }
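For orientation, `normalizeHash` (reformatted above) rewrites a user-facing hash argument into `outputHash`/`outputHashAlgo`, substituting the fake placeholder hash when the value is empty. A sketch, assuming it is exposed as `lib.fetchers.normalizeHash` and using an illustrative URL:

    let lib = import <nixpkgs/lib>; in
    lib.fetchers.normalizeHash { } {
      url = "https://example.org/src.tar.gz";
      sha256 = "";
    }
    # => { url = "https://example.org/src.tar.gz";
    #      outputHashAlgo = "sha256";
    #      outputHash = lib.fakeSha256; }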
@@ -57,7 +57,6 @@
 If you need more file set functions,
 see [this issue](https://github.com/NixOS/nixpkgs/issues/266356) to request it.
 
-
 # Implicit coercion from paths to file sets {#sec-fileset-path-coercion}
 
 All functions accepting file sets as arguments can also accept [paths](https://nixos.org/manual/nix/stable/language/values.html#type-path) as arguments.
@@ -155,14 +154,14 @@ let
 pipe
 ;
 
-in {
+in
+{
 
 /**
 Create a file set from a path that may or may not exist:
 - If the path does exist, the path is [coerced to a file set](#sec-fileset-path-coercion).
 - If the path does not exist, a file set containing no files is returned.
 
-
 # Inputs
 
 `path`
@@ -190,11 +189,9 @@ in {
 path:
 if !isPath path then
 if isStringLike path then
-throw ''
-lib.fileset.maybeMissing: Argument ("${toString path}") is a string-like value, but it should be a path instead.''
+throw ''lib.fileset.maybeMissing: Argument ("${toString path}") is a string-like value, but it should be a path instead.''
 else
-throw ''
-lib.fileset.maybeMissing: Argument is of type ${typeOf path}, but it should be a path instead.''
+throw ''lib.fileset.maybeMissing: Argument is of type ${typeOf path}, but it should be a path instead.''
 else if !pathExists path then
 _emptyWithoutBase
 else
@@ -211,7 +208,6 @@ in {
 
 This variant is useful for tracing file sets in the Nix repl.
 
-
 # Inputs
 
 `fileset`
@@ -248,15 +244,14 @@ in {
 
 :::
 */
-trace = fileset:
+trace =
+fileset:
 let
 # "fileset" would be a better name, but that would clash with the argument name,
 # and we cannot change that because of https://github.com/nix-community/nixdoc/issues/76
 actualFileset = _coerce "lib.fileset.trace: Argument" fileset;
 in
-seq
-(_printFileset actualFileset)
-(x: x);
+seq (_printFileset actualFileset) (x: x);
 
 /**
 Incrementally evaluate and trace a file set in a pretty way.
@@ -268,7 +263,6 @@ in {
 
 This variant is useful for tracing file sets passed as arguments to other functions.
 
-
 # Inputs
 
 `fileset`
@@ -308,14 +302,14 @@ in {
 
 :::
 */
-traceVal = fileset:
+traceVal =
+fileset:
 let
 # "fileset" would be a better name, but that would clash with the argument name,
 # and we cannot change that because of https://github.com/nix-community/nixdoc/issues/76
 actualFileset = _coerce "lib.fileset.traceVal: Argument" fileset;
 in
-seq
-(_printFileset actualFileset)
+seq (_printFileset actualFileset)
 # We could also return the original fileset argument here,
 # but that would then duplicate work for consumers of the fileset, because then they have to coerce it again
 actualFileset;
@@ -423,7 +417,8 @@ in {
 
 :::
 */
-toSource = {
+toSource =
+{
 root,
 fileset,
 }:
@@ -448,8 +443,7 @@ in {
 lib.fileset.toSource: `root` (${toString root}) is a string-like value, but it should be a path instead.
 Paths in strings are not supported by `lib.fileset`, use `lib.sources` or derivations instead.''
 else
-throw ''
-lib.fileset.toSource: `root` is of type ${typeOf root}, but it should be a path instead.''
+throw ''lib.fileset.toSource: `root` is of type ${typeOf root}, but it should be a path instead.''
 # Currently all Nix paths have the same filesystem root, but this could change in the future.
 # See also ../path/README.md
 else if !fileset._internalIsEmptyWithoutBase && rootFilesystemRoot != filesetFilesystemRoot then
@@ -459,8 +453,7 @@ in {
 `fileset`: Filesystem root is "${toString filesetFilesystemRoot}"
 Different filesystem roots are not supported.''
 else if !pathExists root then
-throw ''
-lib.fileset.toSource: `root` (${toString root}) is a path that does not exist.''
+throw ''lib.fileset.toSource: `root` (${toString root}) is a path that does not exist.''
 else if pathType root != "directory" then
 throw ''
 lib.fileset.toSource: `root` (${toString root}) is a file, but it should be a directory instead. Potential solutions:
@@ -472,14 +465,12 @@ in {
 - Set `root` to ${toString fileset._internalBase} or any directory higher up. This changes the layout of the resulting store path.
 - Set `fileset` to a file set that cannot contain files outside the `root` (${toString root}). This could change the files included in the result.''
 else
-seq sourceFilter
-cleanSourceWith {
+seq sourceFilter cleanSourceWith {
 name = "source";
 src = root;
 filter = sourceFilter;
 };
 
-
 /**
 The list of file paths contained in the given file set.
 
@@ -494,7 +485,6 @@ in {
 
 The resulting list of files can be turned back into a file set using [`lib.fileset.unions`](#function-library-lib.fileset.unions).
 
-
 # Inputs
 
 `fileset`
@@ -521,8 +511,7 @@ in {
 
 :::
 */
-toList = fileset:
-_toList (_coerce "lib.fileset.toList: Argument" fileset);
+toList = fileset: _toList (_coerce "lib.fileset.toList: Argument" fileset);
 
 /**
 The file set containing all files that are in either of two given file sets.
@@ -533,7 +522,6 @@ in {
 The given file sets are evaluated as lazily as possible,
 with the first argument being evaluated first if needed.
 
-
 # Inputs
 
 `fileset1`
@@ -567,10 +555,9 @@ in {
 :::
 */
 union =
-fileset1:
-fileset2:
-_unionMany
-(_coerceMany "lib.fileset.union" [
+fileset1: fileset2:
+_unionMany (
+_coerceMany "lib.fileset.union" [
 {
 context = "First argument";
 value = fileset1;
@@ -579,7 +566,8 @@ in {
 context = "Second argument";
 value = fileset2;
 }
-]);
+]
+);
 
 /**
 The file set containing all files that are in any of the given file sets.
@@ -590,7 +578,6 @@ in {
 The given file sets are evaluated as lazily as possible,
 with earlier elements being evaluated first if needed.
 
-
 # Inputs
 
 `filesets`
@@ -632,15 +619,16 @@ in {
 unions =
 filesets:
 if !isList filesets then
-throw ''
-lib.fileset.unions: Argument is of type ${typeOf filesets}, but it should be a list instead.''
+throw ''lib.fileset.unions: Argument is of type ${typeOf filesets}, but it should be a list instead.''
 else
 pipe filesets [
 # Annotate the elements with context, used by _coerceMany for better errors
-(imap0 (i: el: {
+(imap0 (
+i: el: {
 context = "Element ${toString i}";
 value = el;
-}))
+}
+))
 (_coerceMany "lib.fileset.unions")
 _unionMany
 ];
@@ -652,7 +640,6 @@ in {
 The given file sets are evaluated as lazily as possible,
 with the first argument being evaluated first if needed.
 
-
 # Inputs
 
 `fileset1`
@@ -681,8 +668,7 @@ in {
 :::
 */
 intersection =
-fileset1:
-fileset2:
+fileset1: fileset2:
 let
 filesets = _coerceMany "lib.fileset.intersection" [
 {
@@ -695,9 +681,7 @@ in {
 }
 ];
 in
-_intersection
-(elemAt filesets 0)
-(elemAt filesets 1);
+_intersection (elemAt filesets 0) (elemAt filesets 1);
 
 /**
 The file set containing all files from the first file set that are not in the second file set.
@@ -706,7 +690,6 @@ in {
 The given file sets are evaluated as lazily as possible,
 with the first argument being evaluated first if needed.
 
-
 # Inputs
 
 `positive`
@@ -744,8 +727,7 @@ in {
 :::
 */
 difference =
-positive:
-negative:
+positive: negative:
 let
 filesets = _coerceMany "lib.fileset.difference" [
 {
@@ -758,14 +740,11 @@ in {
 }
 ];
 in
-_difference
-(elemAt filesets 0)
-(elemAt filesets 1);
+_difference (elemAt filesets 0) (elemAt filesets 1);
 
 /**
 Filter a file set to only contain files matching some predicate.
 
-
 # Inputs
 
 `predicate`
@@ -827,22 +806,18 @@ in {
 :::
 */
 fileFilter =
-predicate:
-path:
+predicate: path:
 if !isFunction predicate then
-throw ''
-lib.fileset.fileFilter: First argument is of type ${typeOf predicate}, but it should be a function instead.''
+throw ''lib.fileset.fileFilter: First argument is of type ${typeOf predicate}, but it should be a function instead.''
 else if !isPath path then
 if path._type or "" == "fileset" then
 throw ''
 lib.fileset.fileFilter: Second argument is a file set, but it should be a path instead.
 If you need to filter files in a file set, use `intersection fileset (fileFilter pred ./.)` instead.''
 else
-throw ''
-lib.fileset.fileFilter: Second argument is of type ${typeOf path}, but it should be a path instead.''
+throw ''lib.fileset.fileFilter: Second argument is of type ${typeOf path}, but it should be a path instead.''
 else if !pathExists path then
-throw ''
-lib.fileset.fileFilter: Second argument (${toString path}) is a path that does not exist.''
+throw ''lib.fileset.fileFilter: Second argument (${toString path}) is a path that does not exist.''
 else
 _fileFilter predicate path;
 
@@ -859,7 +834,6 @@ in {
 Turning the result of this function back into a source using `toSource` will therefore not preserve empty directories.
 :::
 
-
 # Inputs
 
 `source`
@@ -905,7 +879,8 @@ in {
 
 :::
 */
-fromSource = source:
+fromSource =
+source:
 let
 # This function uses `._isLibCleanSourceWith`, `.origSrc` and `.filter`,
 # which are technically internal to lib.sources,
@@ -921,11 +896,9 @@ in {
 lib.fileset.fromSource: The source origin of the argument is a string-like value ("${toString path}"), but it should be a path instead.
 Sources created from paths in strings cannot be turned into file sets, use `lib.sources` or derivations instead.''
 else
-throw ''
-lib.fileset.fromSource: The source origin of the argument is of type ${typeOf path}, but it should be a path instead.''
+throw ''lib.fileset.fromSource: The source origin of the argument is of type ${typeOf path}, but it should be a path instead.''
 else if !pathExists path then
-throw ''
-lib.fileset.fromSource: The source origin (${toString path}) of the argument is a path that does not exist.''
+throw ''lib.fileset.fromSource: The source origin (${toString path}) of the argument is a path that does not exist.''
 else if isFiltered then
 _fromSourceFilter path source.filter
 else
@@ -937,7 +910,6 @@ in {
 
 This function behaves like [`gitTrackedWith { }`](#function-library-lib.fileset.gitTrackedWith) - using the defaults.
 
-
 # Inputs
 
 `path`
@@ -966,13 +938,7 @@ in {
 
 :::
 */
-gitTracked =
-path:
-_fromFetchGit
-"gitTracked"
-"argument"
-path
-{};
+gitTracked = path: _fromFetchGit "gitTracked" "argument" path { };
 
 /**
 Create a file set containing all [Git-tracked files](https://git-scm.com/book/en/v2/Git-Basics-Recording-Changes-to-the-Repository) in a repository.
@@ -998,7 +964,6 @@ in {
 This may change in the future.
 :::
 
-
 # Inputs
 
 `options` (attribute set)
@@ -1038,14 +1003,13 @@ in {
 else if recurseSubmodules && versionOlder nixVersion _fetchGitSubmodulesMinver then
 throw "lib.fileset.gitTrackedWith: Setting the attribute `recurseSubmodules` to `true` is only supported for Nix version ${_fetchGitSubmodulesMinver} and after, but Nix version ${nixVersion} is used."
 else
-_fromFetchGit
-"gitTrackedWith"
-"second argument"
-path
+_fromFetchGit "gitTrackedWith" "second argument" path
 # This is the only `fetchGit` parameter that makes sense in this context.
 # We can't just pass `submodules = recurseSubmodules` here because
 # this would fail for Nix versions that don't support `submodules`.
-(lib.optionalAttrs recurseSubmodules {
+(
+lib.optionalAttrs recurseSubmodules {
 submodules = true;
-});
+}
+);
 }
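The `lib.fileset` functions touched above compose the same way as before; a typical combination of `unions` and `toSource` looks like this (a sketch; the paths are illustrative and must exist relative to the calling file):

    let
      lib = import <nixpkgs/lib>;
      fs = lib.fileset;
    in
    fs.toSource {
      root = ./.;
      # Keep only default.nix and everything under ./src in the source.
      fileset = fs.unions [
        ./default.nix
        ./src
      ];
    }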
@ -1,4 +1,6 @@
|
|||||||
{ lib ? import ../. }:
|
{
|
||||||
|
lib ? import ../.,
|
||||||
|
}:
|
||||||
let
|
let
|
||||||
|
|
||||||
inherit (builtins)
|
inherit (builtins)
|
||||||
@ -87,7 +89,8 @@ rec {
|
|||||||
let
|
let
|
||||||
parts = splitRoot filesetV0._internalBase;
|
parts = splitRoot filesetV0._internalBase;
|
||||||
in
|
in
|
||||||
filesetV0 // {
|
filesetV0
|
||||||
|
// {
|
||||||
_internalVersion = 1;
|
_internalVersion = 1;
|
||||||
_internalBaseRoot = parts.root;
|
_internalBaseRoot = parts.root;
|
||||||
_internalBaseComponents = components parts.subpath;
|
_internalBaseComponents = components parts.subpath;
|
||||||
@ -98,7 +101,8 @@ rec {
|
|||||||
(
|
(
|
||||||
filesetV1:
|
filesetV1:
|
||||||
# This change is backwards compatible (but not forwards compatible, so we still need a new version)
|
# This change is backwards compatible (but not forwards compatible, so we still need a new version)
|
||||||
filesetV1 // {
|
filesetV1
|
||||||
|
// {
|
||||||
_internalVersion = 2;
|
_internalVersion = 2;
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
@ -106,7 +110,8 @@ rec {
|
|||||||
# Convert v2 into v3: filesetTree's now have a representation for an empty file set without a base path
|
# Convert v2 into v3: filesetTree's now have a representation for an empty file set without a base path
|
||||||
(
|
(
|
||||||
filesetV2:
|
filesetV2:
|
||||||
filesetV2 // {
|
filesetV2
|
||||||
|
// {
|
||||||
# All v1 file sets are not the new empty file set
|
# All v1 file sets are not the new empty file set
|
||||||
_internalIsEmptyWithoutBase = false;
|
_internalIsEmptyWithoutBase = false;
|
||||||
_internalVersion = 3;
|
_internalVersion = 3;
|
||||||
@ -136,7 +141,8 @@ rec {
|
|||||||
|
|
||||||
# Create a fileset, see ./README.md#fileset
|
# Create a fileset, see ./README.md#fileset
|
||||||
# Type: path -> filesetTree -> fileset
|
# Type: path -> filesetTree -> fileset
|
||||||
_create = base: tree:
|
_create =
|
||||||
|
base: tree:
|
||||||
let
|
let
|
||||||
# Decompose the base into its components
|
# Decompose the base into its components
|
||||||
# See ../path/README.md for why we're not just using `toString`
|
# See ../path/README.md for why we're not just using `toString`
|
||||||
@ -162,7 +168,8 @@ rec {
|
|||||||
# Coerce a value to a fileset, erroring when the value cannot be coerced.
|
# Coerce a value to a fileset, erroring when the value cannot be coerced.
|
||||||
# The string gives the context for error messages.
|
# The string gives the context for error messages.
|
||||||
# Type: String -> (fileset | Path) -> fileset
|
# Type: String -> (fileset | Path) -> fileset
|
||||||
_coerce = context: value:
|
_coerce =
|
||||||
|
context: value:
|
||||||
if value._type or "" == "fileset" then
|
if value._type or "" == "fileset" then
|
||||||
if value._internalVersion > _currentVersion then
|
if value._internalVersion > _currentVersion then
|
||||||
throw ''
|
throw ''
|
||||||
@ -173,7 +180,9 @@ rec {
|
|||||||
else if value._internalVersion < _currentVersion then
|
else if value._internalVersion < _currentVersion then
|
||||||
let
|
let
|
||||||
# Get all the migration functions necessary to convert from the old to the current version
|
# Get all the migration functions necessary to convert from the old to the current version
|
||||||
migrationsToApply = sublist value._internalVersion (_currentVersion - value._internalVersion) migrations;
|
migrationsToApply = sublist value._internalVersion (
|
||||||
|
_currentVersion - value._internalVersion
|
||||||
|
) migrations;
|
||||||
in
|
in
|
||||||
foldl' (value: migration: migration value) value migrationsToApply
|
foldl' (value: migration: migration value) value migrationsToApply
|
||||||
else
|
else
|
||||||
@ -189,8 +198,7 @@ rec {
|
|||||||
${context} ("${toString value}") is a string-like value, but it should be a file set or a path instead.
|
${context} ("${toString value}") is a string-like value, but it should be a file set or a path instead.
|
||||||
Paths represented as strings are not supported by `lib.fileset`, use `lib.sources` or derivations instead.''
|
Paths represented as strings are not supported by `lib.fileset`, use `lib.sources` or derivations instead.''
|
||||||
else
|
else
|
||||||
throw ''
|
throw ''${context} is of type ${typeOf value}, but it should be a file set or a path instead.''
|
||||||
${context} is of type ${typeOf value}, but it should be a file set or a path instead.''
|
|
||||||
else if !pathExists value then
|
else if !pathExists value then
|
||||||
throw ''
|
throw ''
|
||||||
${context} (${toString value}) is a path that does not exist.
|
${context} (${toString value}) is a path that does not exist.
|
||||||
@@ -201,11 +209,10 @@ rec {
# Coerce many values to filesets, erroring when any value cannot be coerced,
# or if the filesystem root of the values doesn't match.
# Type: String -> [ { context :: String, value :: fileset | Path } ] -> [ fileset ]
-_coerceMany = functionContext: list:
+_coerceMany =
+functionContext: list:
let
-filesets = map ({ context, value }:
-_coerce "${functionContext}: ${context}" value
-) list;
+filesets = map ({ context, value }: _coerce "${functionContext}: ${context}" value) list;

# Find the first value with a base, there may be none!
firstWithBase = findFirst (fileset: !fileset._internalIsEmptyWithoutBase) null filesets;
@@ -213,10 +220,10 @@ rec {
firstBaseRoot = firstWithBase._internalBaseRoot;

# Finds the first element with a filesystem root different than the first element, if any
-differentIndex = findFirstIndex (fileset:
+differentIndex = findFirstIndex (
+fileset:
# The empty value without a base doesn't have a base path
-! fileset._internalIsEmptyWithoutBase
-&& firstBaseRoot != fileset._internalBaseRoot
+!fileset._internalIsEmptyWithoutBase && firstBaseRoot != fileset._internalBaseRoot
) null filesets;
in
# Only evaluates `differentIndex` if there are any elements with a base
@ -231,7 +238,8 @@ rec {
|
|||||||
|
|
||||||
# Create a file set from a path.
|
# Create a file set from a path.
|
||||||
# Type: Path -> fileset
|
# Type: Path -> fileset
|
||||||
_singleton = path:
|
_singleton =
|
||||||
|
path:
|
||||||
let
|
let
|
||||||
type = pathType path;
|
type = pathType path;
|
||||||
in
|
in
|
||||||
@ -244,21 +252,20 @@ rec {
|
|||||||
# "default.nix" = <type>;
|
# "default.nix" = <type>;
|
||||||
# }
|
# }
|
||||||
# See ./README.md#single-files
|
# See ./README.md#single-files
|
||||||
_create (dirOf path)
|
_create (dirOf path) {
|
||||||
{
|
|
||||||
${baseNameOf path} = type;
|
${baseNameOf path} = type;
|
||||||
};
|
};
|
||||||
|
|
||||||
# Expand a directory representation to an equivalent one in attribute set form.
|
# Expand a directory representation to an equivalent one in attribute set form.
|
||||||
# All directory entries are included in the result.
|
# All directory entries are included in the result.
|
||||||
# Type: Path -> filesetTree -> { <name> = filesetTree; }
|
# Type: Path -> filesetTree -> { <name> = filesetTree; }
|
||||||
_directoryEntries = path: value:
|
_directoryEntries =
|
||||||
|
path: value:
|
||||||
if value == "directory" then
|
if value == "directory" then
|
||||||
readDir path
|
readDir path
|
||||||
else
|
else
|
||||||
# Set all entries not present to null
|
# Set all entries not present to null
|
||||||
mapAttrs (name: value: null) (readDir path)
|
mapAttrs (name: value: null) (readDir path) // value;
|
||||||
// value;
|
|
||||||
|
|
||||||
/*
|
/*
|
||||||
A normalisation of a filesetTree suitable filtering with `builtins.path`:
|
A normalisation of a filesetTree suitable filtering with `builtins.path`:
|
||||||
@ -271,7 +278,8 @@ rec {
|
|||||||
|
|
||||||
Type: Path -> filesetTree -> filesetTree
|
Type: Path -> filesetTree -> filesetTree
|
||||||
*/
|
*/
|
||||||
_normaliseTreeFilter = path: tree:
|
_normaliseTreeFilter =
|
||||||
|
path: tree:
|
||||||
if tree == "directory" || isAttrs tree then
|
if tree == "directory" || isAttrs tree then
|
||||||
let
|
let
|
||||||
entries = _directoryEntries path tree;
|
entries = _directoryEntries path tree;
|
||||||
@ -301,7 +309,8 @@ rec {
|
|||||||
|
|
||||||
Type: Path -> filesetTree -> filesetTree (with "emptyDir"'s)
|
Type: Path -> filesetTree -> filesetTree (with "emptyDir"'s)
|
||||||
*/
|
*/
|
||||||
_normaliseTreeMinimal = path: tree:
|
_normaliseTreeMinimal =
|
||||||
|
path: tree:
|
||||||
if tree == "directory" || isAttrs tree then
|
if tree == "directory" || isAttrs tree then
|
||||||
let
|
let
|
||||||
entries = _directoryEntries path tree;
|
entries = _directoryEntries path tree;
|
||||||
@ -334,9 +343,11 @@ rec {
|
|||||||
# Trace a filesetTree in a pretty way when the resulting value is evaluated.
|
# Trace a filesetTree in a pretty way when the resulting value is evaluated.
|
||||||
# This can handle both normal filesetTree's, and ones returned from _normaliseTreeMinimal
|
# This can handle both normal filesetTree's, and ones returned from _normaliseTreeMinimal
|
||||||
# Type: Path -> filesetTree (with "emptyDir"'s) -> Null
|
# Type: Path -> filesetTree (with "emptyDir"'s) -> Null
|
||||||
_printMinimalTree = base: tree:
|
_printMinimalTree =
|
||||||
|
base: tree:
|
||||||
let
|
let
|
||||||
treeSuffix = tree:
|
treeSuffix =
|
||||||
|
tree:
|
||||||
if isAttrs tree then
|
if isAttrs tree then
|
||||||
""
|
""
|
||||||
else if tree == "directory" then
|
else if tree == "directory" then
|
||||||
@ -349,14 +360,15 @@ rec {
|
|||||||
" (${tree})";
|
" (${tree})";
|
||||||
|
|
||||||
# Only for attribute set trees
|
# Only for attribute set trees
|
||||||
traceTreeAttrs = prevLine: indent: tree:
|
traceTreeAttrs =
|
||||||
foldl' (prevLine: name:
|
prevLine: indent: tree:
|
||||||
|
foldl' (
|
||||||
|
prevLine: name:
|
||||||
let
|
let
|
||||||
subtree = tree.${name};
|
subtree = tree.${name};
|
||||||
|
|
||||||
# Evaluating this prints the line for this subtree
|
# Evaluating this prints the line for this subtree
|
||||||
thisLine =
|
thisLine = trace "${indent}- ${name}${treeSuffix subtree}" prevLine;
|
||||||
trace "${indent}- ${name}${treeSuffix subtree}" prevLine;
|
|
||||||
in
|
in
|
||||||
if subtree == null || subtree == "emptyDir" then
|
if subtree == null || subtree == "emptyDir" then
|
||||||
# Don't print anything at all if this subtree is empty
|
# Don't print anything at all if this subtree is empty
|
||||||
@ -378,24 +390,24 @@ rec {
|
|||||||
else
|
else
|
||||||
trace "${toString base}${treeSuffix tree}" null;
|
trace "${toString base}${treeSuffix tree}" null;
|
||||||
in
|
in
|
||||||
if isAttrs tree then
|
if isAttrs tree then traceTreeAttrs firstLine "" tree else firstLine;
|
||||||
traceTreeAttrs firstLine "" tree
|
|
||||||
else
|
|
||||||
firstLine;
|
|
||||||
|
|
||||||
# Pretty-print a file set in a pretty way when the resulting value is evaluated
|
# Pretty-print a file set in a pretty way when the resulting value is evaluated
|
||||||
# Type: fileset -> Null
|
# Type: fileset -> Null
|
||||||
_printFileset = fileset:
|
_printFileset =
|
||||||
|
fileset:
|
||||||
if fileset._internalIsEmptyWithoutBase then
|
if fileset._internalIsEmptyWithoutBase then
|
||||||
trace "(empty)" null
|
trace "(empty)" null
|
||||||
else
|
else
|
||||||
_printMinimalTree fileset._internalBase
|
_printMinimalTree fileset._internalBase (
|
||||||
(_normaliseTreeMinimal fileset._internalBase fileset._internalTree);
|
_normaliseTreeMinimal fileset._internalBase fileset._internalTree
|
||||||
|
);
|
||||||
|
|
||||||
# Turn a fileset into a source filter function suitable for `builtins.path`
|
# Turn a fileset into a source filter function suitable for `builtins.path`
|
||||||
# Only directories recursively containing at least one files are recursed into
|
# Only directories recursively containing at least one files are recursed into
|
||||||
# Type: fileset -> (String -> String -> Bool)
|
# Type: fileset -> (String -> String -> Bool)
|
||||||
_toSourceFilter = fileset:
|
_toSourceFilter =
|
||||||
|
fileset:
|
||||||
let
|
let
|
||||||
# Simplify the tree, necessary to make sure all empty directories are null
|
# Simplify the tree, necessary to make sure all empty directories are null
|
||||||
# which has the effect that they aren't included in the result
|
# which has the effect that they aren't included in the result
|
||||||
@ -414,9 +426,11 @@ rec {
|
|||||||
# Check whether a list of path components under the base path exists in the tree.
|
# Check whether a list of path components under the base path exists in the tree.
|
||||||
# This function is called often, so it should be fast.
|
# This function is called often, so it should be fast.
|
||||||
# Type: [ String ] -> Bool
|
# Type: [ String ] -> Bool
|
||||||
inTree = components:
|
inTree =
|
||||||
|
components:
|
||||||
let
|
let
|
||||||
recurse = index: localTree:
|
recurse =
|
||||||
|
index: localTree:
|
||||||
if isAttrs localTree then
|
if isAttrs localTree then
|
||||||
# We have an attribute set, meaning this is a directory with at least one file
|
# We have an attribute set, meaning this is a directory with at least one file
|
||||||
if index >= length components then
|
if index >= length components then
|
||||||
@ -431,7 +445,8 @@ rec {
|
|||||||
# If it's not an attribute set it can only be either null (in which case it's not included)
|
# If it's not an attribute set it can only be either null (in which case it's not included)
|
||||||
# or a string ("directory" or "regular", etc.) in which case it's included
|
# or a string ("directory" or "regular", etc.) in which case it's included
|
||||||
localTree != null;
|
localTree != null;
|
||||||
in recurse 0 tree;
|
in
|
||||||
|
recurse 0 tree;
|
||||||
|
|
||||||
# Filter suited when there's no files
|
# Filter suited when there's no files
|
||||||
empty = _: _: false;
|
empty = _: _: false;
|
||||||
@ -483,16 +498,14 @@ rec {
|
|||||||
# Special case because the code below assumes that the _internalBase is always included in the result
|
# Special case because the code below assumes that the _internalBase is always included in the result
|
||||||
# which shouldn't be done when we have no files at all in the base
|
# which shouldn't be done when we have no files at all in the base
|
||||||
# This also forces the tree before returning the filter, leads to earlier error messages
|
# This also forces the tree before returning the filter, leads to earlier error messages
|
||||||
if fileset._internalIsEmptyWithoutBase || tree == null then
|
if fileset._internalIsEmptyWithoutBase || tree == null then empty else nonEmpty;
|
||||||
empty
|
|
||||||
else
|
|
||||||
nonEmpty;
|
|
||||||
|
|
||||||
# Turn a builtins.filterSource-based source filter on a root path into a file set
|
# Turn a builtins.filterSource-based source filter on a root path into a file set
|
||||||
# containing only files included by the filter.
|
# containing only files included by the filter.
|
||||||
# The filter is lazily called as necessary to determine whether paths are included
|
# The filter is lazily called as necessary to determine whether paths are included
|
||||||
# Type: Path -> (String -> String -> Bool) -> fileset
|
# Type: Path -> (String -> String -> Bool) -> fileset
|
||||||
_fromSourceFilter = root: sourceFilter:
|
_fromSourceFilter =
|
||||||
|
root: sourceFilter:
|
||||||
let
|
let
|
||||||
# During the recursion we need to track both:
|
# During the recursion we need to track both:
|
||||||
# - The path value such that we can safely call `readDir` on it
|
# - The path value such that we can safely call `readDir` on it
|
||||||
@ -503,7 +516,8 @@ rec {
|
|||||||
# which is a fairly expensive operation
|
# which is a fairly expensive operation
|
||||||
|
|
||||||
# Create a file set from a directory entry
|
# Create a file set from a directory entry
|
||||||
fromDirEntry = path: pathString: type:
|
fromDirEntry =
|
||||||
|
path: pathString: type:
|
||||||
# The filter needs to run on the path as a string
|
# The filter needs to run on the path as a string
|
||||||
if !sourceFilter pathString type then
|
if !sourceFilter pathString type then
|
||||||
null
|
null
|
||||||
@ -513,7 +527,8 @@ rec {
|
|||||||
type;
|
type;
|
||||||
|
|
||||||
# Create a file set from a directory
|
# Create a file set from a directory
|
||||||
fromDir = path: pathString:
|
fromDir =
|
||||||
|
path: pathString:
|
||||||
mapAttrs
|
mapAttrs
|
||||||
# This looks a bit funny, but we need both the path-based and the path string-based values
|
# This looks a bit funny, but we need both the path-based and the path string-based values
|
||||||
(name: fromDirEntry (path + "/${name}") (pathString + "/${name}"))
|
(name: fromDirEntry (path + "/${name}") (pathString + "/${name}"))
|
||||||
@ -536,20 +551,19 @@ rec {
|
|||||||
else
|
else
|
||||||
# Direct files are always included by builtins.path without calling the filter
|
# Direct files are always included by builtins.path without calling the filter
|
||||||
# But we need to lift up the base path to its parent to satisfy the base path invariant
|
# But we need to lift up the base path to its parent to satisfy the base path invariant
|
||||||
_create (dirOf root)
|
_create (dirOf root) {
|
||||||
{
|
|
||||||
${baseNameOf root} = rootPathType;
|
${baseNameOf root} = rootPathType;
|
||||||
};
|
};
|
||||||
|
|
||||||
# Turns a file set into the list of file paths it includes.
|
# Turns a file set into the list of file paths it includes.
|
||||||
# Type: fileset -> [ Path ]
|
# Type: fileset -> [ Path ]
|
||||||
_toList = fileset:
|
_toList =
|
||||||
|
fileset:
|
||||||
let
|
let
|
||||||
recurse = path: tree:
|
recurse =
|
||||||
|
path: tree:
|
||||||
if isAttrs tree then
|
if isAttrs tree then
|
||||||
concatLists (mapAttrsToList (name: value:
|
concatLists (mapAttrsToList (name: value: recurse (path + "/${name}") value) tree)
|
||||||
recurse (path + "/${name}") value
|
|
||||||
) tree)
|
|
||||||
else if tree == "directory" then
|
else if tree == "directory" then
|
||||||
recurse path (readDir path)
|
recurse path (readDir path)
|
||||||
else if tree == null then
|
else if tree == null then
|
||||||
@ -565,9 +579,11 @@ rec {
|
|||||||
# Transforms the filesetTree of a file set to a shorter base path, e.g.
|
# Transforms the filesetTree of a file set to a shorter base path, e.g.
|
||||||
# _shortenTreeBase [ "foo" ] (_create /foo/bar null)
|
# _shortenTreeBase [ "foo" ] (_create /foo/bar null)
|
||||||
# => { bar = null; }
|
# => { bar = null; }
|
||||||
_shortenTreeBase = targetBaseComponents: fileset:
|
_shortenTreeBase =
|
||||||
|
targetBaseComponents: fileset:
|
||||||
let
|
let
|
||||||
recurse = index:
|
recurse =
|
||||||
|
index:
|
||||||
# If we haven't reached the required depth yet
|
# If we haven't reached the required depth yet
|
||||||
if index < length fileset._internalBaseComponents then
|
if index < length fileset._internalBaseComponents then
|
||||||
# Create an attribute set and recurse as the value, this can be lazily evaluated this way
|
# Create an attribute set and recurse as the value, this can be lazily evaluated this way
|
||||||
@ -581,9 +597,11 @@ rec {
|
|||||||
# Transforms the filesetTree of a file set to a longer base path, e.g.
|
# Transforms the filesetTree of a file set to a longer base path, e.g.
|
||||||
# _lengthenTreeBase [ "foo" "bar" ] (_create /foo { bar.baz = "regular"; })
|
# _lengthenTreeBase [ "foo" "bar" ] (_create /foo { bar.baz = "regular"; })
|
||||||
# => { baz = "regular"; }
|
# => { baz = "regular"; }
|
||||||
_lengthenTreeBase = targetBaseComponents: fileset:
|
_lengthenTreeBase =
|
||||||
|
targetBaseComponents: fileset:
|
||||||
let
|
let
|
||||||
recurse = index: tree:
|
recurse =
|
||||||
|
index: tree:
|
||||||
# If the filesetTree is an attribute set and we haven't reached the required depth yet
|
# If the filesetTree is an attribute set and we haven't reached the required depth yet
|
||||||
if isAttrs tree && index < length targetBaseComponents then
|
if isAttrs tree && index < length targetBaseComponents then
|
||||||
# Recurse with the tree under the right component (which might not exist)
|
# Recurse with the tree under the right component (which might not exist)
|
||||||
@ -602,7 +620,8 @@ rec {
|
|||||||
# Computes the union of a list of filesets.
|
# Computes the union of a list of filesets.
|
||||||
# The filesets must already be coerced and validated to be in the same filesystem root
|
# The filesets must already be coerced and validated to be in the same filesystem root
|
||||||
# Type: [ Fileset ] -> Fileset
|
# Type: [ Fileset ] -> Fileset
|
||||||
_unionMany = filesets:
|
_unionMany =
|
||||||
|
filesets:
|
||||||
let
|
let
|
||||||
# All filesets that have a base, aka not the ones that are the empty value without a base
|
# All filesets that have a base, aka not the ones that are the empty value without a base
|
||||||
filesetsWithBase = filter (fileset: !fileset._internalIsEmptyWithoutBase) filesets;
|
filesetsWithBase = filter (fileset: !fileset._internalIsEmptyWithoutBase) filesets;
|
||||||
@ -618,8 +637,8 @@ rec {
|
|||||||
# A list of path components common to all base paths.
|
# A list of path components common to all base paths.
|
||||||
# Note that commonPrefix can only be fully evaluated,
|
# Note that commonPrefix can only be fully evaluated,
|
||||||
# so this cannot cause a stack overflow due to a build-up of unevaluated thunks.
|
# so this cannot cause a stack overflow due to a build-up of unevaluated thunks.
|
||||||
commonBaseComponents = foldl'
|
commonBaseComponents =
|
||||||
(components: el: commonPrefix components el._internalBaseComponents)
|
foldl' (components: el: commonPrefix components el._internalBaseComponents)
|
||||||
firstWithBase._internalBaseComponents
|
firstWithBase._internalBaseComponents
|
||||||
# We could also not do the `tail` here to avoid a list allocation,
|
# We could also not do the `tail` here to avoid a list allocation,
|
||||||
# but then we'd have to pay for a potentially expensive
|
# but then we'd have to pay for a potentially expensive
|
||||||
@ -643,15 +662,13 @@ rec {
|
|||||||
resultTree = _unionTrees trees;
|
resultTree = _unionTrees trees;
|
||||||
in
|
in
|
||||||
# If there's no values with a base, we have no files
|
# If there's no values with a base, we have no files
|
||||||
if filesetsWithBase == [ ] then
|
if filesetsWithBase == [ ] then _emptyWithoutBase else _create commonBase resultTree;
|
||||||
_emptyWithoutBase
|
|
||||||
else
|
|
||||||
_create commonBase resultTree;
|
|
||||||
|
|
||||||
# The union of multiple filesetTree's with the same base path.
|
# The union of multiple filesetTree's with the same base path.
|
||||||
# Later elements are only evaluated if necessary.
|
# Later elements are only evaluated if necessary.
|
||||||
# Type: [ filesetTree ] -> filesetTree
|
# Type: [ filesetTree ] -> filesetTree
|
||||||
_unionTrees = trees:
|
_unionTrees =
|
||||||
|
trees:
|
||||||
let
|
let
|
||||||
stringIndex = findFirstIndex isString null trees;
|
stringIndex = findFirstIndex isString null trees;
|
||||||
withoutNull = filter (tree: tree != null) trees;
|
withoutNull = filter (tree: tree != null) trees;
|
||||||
@ -671,18 +688,15 @@ rec {
|
|||||||
# Computes the intersection of a list of filesets.
|
# Computes the intersection of a list of filesets.
|
||||||
# The filesets must already be coerced and validated to be in the same filesystem root
|
# The filesets must already be coerced and validated to be in the same filesystem root
|
||||||
# Type: Fileset -> Fileset -> Fileset
|
# Type: Fileset -> Fileset -> Fileset
|
||||||
_intersection = fileset1: fileset2:
|
_intersection =
|
||||||
|
fileset1: fileset2:
|
||||||
let
|
let
|
||||||
# The common base components prefix, e.g.
|
# The common base components prefix, e.g.
|
||||||
# (/foo/bar, /foo/bar/baz) -> /foo/bar
|
# (/foo/bar, /foo/bar/baz) -> /foo/bar
|
||||||
# (/foo/bar, /foo/baz) -> /foo
|
# (/foo/bar, /foo/baz) -> /foo
|
||||||
commonBaseComponentsLength =
|
commonBaseComponentsLength =
|
||||||
# TODO: Have a `lib.lists.commonPrefixLength` function such that we don't need the list allocation from commonPrefix here
|
# TODO: Have a `lib.lists.commonPrefixLength` function such that we don't need the list allocation from commonPrefix here
|
||||||
length (
|
length (commonPrefix fileset1._internalBaseComponents fileset2._internalBaseComponents);
|
||||||
commonPrefix
|
|
||||||
fileset1._internalBaseComponents
|
|
||||||
fileset2._internalBaseComponents
|
|
||||||
);
|
|
||||||
|
|
||||||
# To be able to intersect filesetTree's together, they need to have the same base path.
|
# To be able to intersect filesetTree's together, they need to have the same base path.
|
||||||
# Base paths can be intersected by taking the longest one (if any)
|
# Base paths can be intersected by taking the longest one (if any)
|
||||||
@ -725,12 +739,11 @@ rec {
|
|||||||
# The intersection of two filesetTree's with the same base path
|
# The intersection of two filesetTree's with the same base path
|
||||||
# The second element is only evaluated as much as necessary.
|
# The second element is only evaluated as much as necessary.
|
||||||
# Type: filesetTree -> filesetTree -> filesetTree
|
# Type: filesetTree -> filesetTree -> filesetTree
|
||||||
_intersectTree = lhs: rhs:
|
_intersectTree =
|
||||||
|
lhs: rhs:
|
||||||
if isAttrs lhs && isAttrs rhs then
|
if isAttrs lhs && isAttrs rhs then
|
||||||
# Both sides are attribute sets, we can recurse for the attributes existing on both sides
|
# Both sides are attribute sets, we can recurse for the attributes existing on both sides
|
||||||
mapAttrs
|
mapAttrs (name: _intersectTree lhs.${name}) (builtins.intersectAttrs lhs rhs)
|
||||||
(name: _intersectTree lhs.${name})
|
|
||||||
(builtins.intersectAttrs lhs rhs)
|
|
||||||
else if lhs == null || isString rhs then
|
else if lhs == null || isString rhs then
|
||||||
# If the lhs is null, the result should also be null
|
# If the lhs is null, the result should also be null
|
||||||
# And if the rhs is the identity element
|
# And if the rhs is the identity element
|
||||||
@ -743,18 +756,15 @@ rec {
|
|||||||
# Compute the set difference between two file sets.
|
# Compute the set difference between two file sets.
|
||||||
# The filesets must already be coerced and validated to be in the same filesystem root.
|
# The filesets must already be coerced and validated to be in the same filesystem root.
|
||||||
# Type: Fileset -> Fileset -> Fileset
|
# Type: Fileset -> Fileset -> Fileset
|
||||||
_difference = positive: negative:
|
_difference =
|
||||||
|
positive: negative:
|
||||||
let
|
let
|
||||||
# The common base components prefix, e.g.
|
# The common base components prefix, e.g.
|
||||||
# (/foo/bar, /foo/bar/baz) -> /foo/bar
|
# (/foo/bar, /foo/bar/baz) -> /foo/bar
|
||||||
# (/foo/bar, /foo/baz) -> /foo
|
# (/foo/bar, /foo/baz) -> /foo
|
||||||
commonBaseComponentsLength =
|
commonBaseComponentsLength =
|
||||||
# TODO: Have a `lib.lists.commonPrefixLength` function such that we don't need the list allocation from commonPrefix here
|
# TODO: Have a `lib.lists.commonPrefixLength` function such that we don't need the list allocation from commonPrefix here
|
||||||
length (
|
length (commonPrefix positive._internalBaseComponents negative._internalBaseComponents);
|
||||||
commonPrefix
|
|
||||||
positive._internalBaseComponents
|
|
||||||
negative._internalBaseComponents
|
|
||||||
);
|
|
||||||
|
|
||||||
# We need filesetTree's with the same base to be able to compute the difference between them
|
# We need filesetTree's with the same base to be able to compute the difference between them
|
||||||
# This here is the filesetTree from the negative file set, but for a base path that matches the positive file set.
|
# This here is the filesetTree from the negative file set, but for a base path that matches the positive file set.
|
||||||
@ -786,9 +796,7 @@ rec {
|
|||||||
null;
|
null;
|
||||||
|
|
||||||
resultingTree =
|
resultingTree =
|
||||||
_differenceTree
|
_differenceTree positive._internalBase positive._internalTree
|
||||||
positive._internalBase
|
|
||||||
positive._internalTree
|
|
||||||
negativeTreeWithPositiveBase;
|
negativeTreeWithPositiveBase;
|
||||||
in
|
in
|
||||||
# If the first file set is empty, we can never have any files in the result
|
# If the first file set is empty, we can never have any files in the result
|
||||||
@ -805,7 +813,8 @@ rec {
|
|||||||
|
|
||||||
# Computes the set difference of two filesetTree's
|
# Computes the set difference of two filesetTree's
|
||||||
# Type: Path -> filesetTree -> filesetTree
|
# Type: Path -> filesetTree -> filesetTree
|
||||||
_differenceTree = path: lhs: rhs:
|
_differenceTree =
|
||||||
|
path: lhs: rhs:
|
||||||
# If the lhs doesn't have any files, or the right hand side includes all files
|
# If the lhs doesn't have any files, or the right hand side includes all files
|
||||||
if lhs == null || isString rhs then
|
if lhs == null || isString rhs then
|
||||||
# The result will always be empty
|
# The result will always be empty
|
||||||
@ -816,17 +825,19 @@ rec {
|
|||||||
lhs
|
lhs
|
||||||
else
|
else
|
||||||
# Otherwise we always have two attribute sets to recurse into
|
# Otherwise we always have two attribute sets to recurse into
|
||||||
mapAttrs (name: lhsValue:
|
mapAttrs (name: lhsValue: _differenceTree (path + "/${name}") lhsValue (rhs.${name} or null)) (
|
||||||
_differenceTree (path + "/${name}") lhsValue (rhs.${name} or null)
|
_directoryEntries path lhs
|
||||||
) (_directoryEntries path lhs);
|
);
|
||||||
|
|
||||||
# Filters all files in a path based on a predicate
|
# Filters all files in a path based on a predicate
|
||||||
# Type: ({ name, type, ... } -> Bool) -> Path -> FileSet
|
# Type: ({ name, type, ... } -> Bool) -> Path -> FileSet
|
||||||
_fileFilter = predicate: root:
|
_fileFilter =
|
||||||
|
predicate: root:
|
||||||
let
|
let
|
||||||
# Check the predicate for a single file
|
# Check the predicate for a single file
|
||||||
# Type: String -> String -> filesetTree
|
# Type: String -> String -> filesetTree
|
||||||
fromFile = name: type:
|
fromFile =
|
||||||
|
name: type:
|
||||||
if
|
if
|
||||||
predicate {
|
predicate {
|
||||||
inherit name type;
|
inherit name type;
|
||||||
@ -834,7 +845,8 @@ rec {
|
|||||||
|
|
||||||
# To ensure forwards compatibility with more arguments being added in the future,
|
# To ensure forwards compatibility with more arguments being added in the future,
|
||||||
# adding an attribute which can't be deconstructed :)
|
# adding an attribute which can't be deconstructed :)
|
||||||
"lib.fileset.fileFilter: The predicate function passed as the first argument must be able to handle extra attributes for future compatibility. If you're using `{ name, file, hasExt }:`, use `{ name, file, hasExt, ... }:` instead." = null;
|
"lib.fileset.fileFilter: The predicate function passed as the first argument must be able to handle extra attributes for future compatibility. If you're using `{ name, file, hasExt }:`, use `{ name, file, hasExt, ... }:` instead." =
|
||||||
|
null;
|
||||||
}
|
}
|
||||||
then
|
then
|
||||||
type
|
type
|
||||||
@ -843,12 +855,10 @@ rec {
|
|||||||
|
|
||||||
# Check the predicate for all files in a directory
|
# Check the predicate for all files in a directory
|
||||||
# Type: Path -> filesetTree
|
# Type: Path -> filesetTree
|
||||||
fromDir = path:
|
fromDir =
|
||||||
mapAttrs (name: type:
|
path:
|
||||||
if type == "directory" then
|
mapAttrs (
|
||||||
fromDir (path + "/${name}")
|
name: type: if type == "directory" then fromDir (path + "/${name}") else fromFile name type
|
||||||
else
|
|
||||||
fromFile name type
|
|
||||||
) (readDir path);
|
) (readDir path);
|
||||||
|
|
||||||
rootType = pathType root;
|
rootType = pathType root;
|
||||||
@ -858,8 +868,7 @@ rec {
|
|||||||
else
|
else
|
||||||
# Single files are turned into a directory containing that file or nothing.
|
# Single files are turned into a directory containing that file or nothing.
|
||||||
_create (dirOf root) {
|
_create (dirOf root) {
|
||||||
${baseNameOf root} =
|
${baseNameOf root} = fromFile (baseNameOf root) rootType;
|
||||||
fromFile (baseNameOf root) rootType;
|
|
||||||
};
|
};
|
||||||
|
|
||||||
# Support for `builtins.fetchGit` with `submodules = true` was introduced in 2.4
|
# Support for `builtins.fetchGit` with `submodules = true` was introduced in 2.4
|
||||||
@ -876,22 +885,21 @@ rec {
|
|||||||
# - The store path must not include files that don't exist in the respective local path.
|
# - The store path must not include files that don't exist in the respective local path.
|
||||||
#
|
#
|
||||||
# Type: Path -> String -> FileSet
|
# Type: Path -> String -> FileSet
|
||||||
_mirrorStorePath = localPath: storePath:
|
_mirrorStorePath =
|
||||||
|
localPath: storePath:
|
||||||
let
|
let
|
||||||
recurse = focusedStorePath:
|
recurse =
|
||||||
mapAttrs (name: type:
|
focusedStorePath:
|
||||||
if type == "directory" then
|
mapAttrs (
|
||||||
recurse (focusedStorePath + "/${name}")
|
name: type: if type == "directory" then recurse (focusedStorePath + "/${name}") else type
|
||||||
else
|
|
||||||
type
|
|
||||||
) (builtins.readDir focusedStorePath);
|
) (builtins.readDir focusedStorePath);
|
||||||
in
|
in
|
||||||
_create localPath
|
_create localPath (recurse storePath);
|
||||||
(recurse storePath);
|
|
||||||
|
|
||||||
# Create a file set from the files included in the result of a fetchGit call
|
# Create a file set from the files included in the result of a fetchGit call
|
||||||
# Type: String -> String -> Path -> Attrs -> FileSet
|
# Type: String -> String -> Path -> Attrs -> FileSet
|
||||||
_fromFetchGit = function: argument: path: extraFetchGitAttrs:
|
_fromFetchGit =
|
||||||
|
function: argument: path: extraFetchGitAttrs:
|
||||||
let
|
let
|
||||||
# The code path for when isStorePath is true
|
# The code path for when isStorePath is true
|
||||||
tryStorePath =
|
tryStorePath =
|
||||||
@ -922,7 +930,8 @@ rec {
|
|||||||
# With the [lazy trees PR](https://github.com/NixOS/nix/pull/6530),
|
# With the [lazy trees PR](https://github.com/NixOS/nix/pull/6530),
|
||||||
# the unnecessarily import could be avoided.
|
# the unnecessarily import could be avoided.
|
||||||
# However a simpler alternative still would be [a builtins.gitLsFiles](https://github.com/NixOS/nix/issues/2944).
|
# However a simpler alternative still would be [a builtins.gitLsFiles](https://github.com/NixOS/nix/issues/2944).
|
||||||
fetchResult = fetchGit ({
|
fetchResult = fetchGit (
|
||||||
|
{
|
||||||
url = path;
|
url = path;
|
||||||
}
|
}
|
||||||
# In older Nix versions, repositories were always assumed to be deep clones, which made `fetchGit` fail for shallow clones
|
# In older Nix versions, repositories were always assumed to be deep clones, which made `fetchGit` fail for shallow clones
|
||||||
@ -934,7 +943,8 @@ rec {
|
|||||||
# Checking for `.git/shallow` doesn't seem worth it, especially since that's more of an implementation detail,
|
# Checking for `.git/shallow` doesn't seem worth it, especially since that's more of an implementation detail,
|
||||||
# and would also require more code to handle worktrees where `.git` is a file.
|
# and would also require more code to handle worktrees where `.git` is a file.
|
||||||
// optionalAttrs (versionAtLeast nixVersion _fetchGitShallowMinver) { shallow = true; }
|
// optionalAttrs (versionAtLeast nixVersion _fetchGitShallowMinver) { shallow = true; }
|
||||||
// extraFetchGitAttrs);
|
// extraFetchGitAttrs
|
||||||
|
);
|
||||||
in
|
in
|
||||||
# We can identify local working directories by checking for .git,
|
# We can identify local working directories by checking for .git,
|
||||||
# see https://git-scm.com/docs/gitrepository-layout#_description.
|
# see https://git-scm.com/docs/gitrepository-layout#_description.
|
||||||
@@ -8,18 +8,21 @@
# }
self: super: {
path = super.path // {
-splitRoot = path:
+splitRoot =
+path:
let
parts = super.path.splitRoot path;
components = self.path.subpath.components parts.subpath;
count = self.length components;
-rootIndex = count - self.lists.findFirstIndex
-(component: component == "mock-root")
-(self.length components)
-(self.reverseList components);
+rootIndex =
+count
+- self.lists.findFirstIndex (component: component == "mock-root") (self.length components) (
+self.reverseList components
+);
root = self.path.append parts.root (self.path.subpath.join (self.take rootIndex components));
subpath = self.path.subpath.join (self.drop rootIndex components);
-in {
+in
+{
inherit root subpath;
};
};
@@ -13,8 +13,9 @@ finalLib: prevLib: # lib overlay

{
trivial = prevLib.trivial // {
-versionSuffix =
-".${finalLib.substring 0 8 (self.lastModifiedDate or "19700101")}.${self.shortRev or "dirty"}";
+versionSuffix = ".${
+finalLib.substring 0 8 (self.lastModifiedDate or "19700101")
+}.${self.shortRev or "dirty"}";
revisionWithDefault = default: self.rev or default;
};
}
@@ -1,10 +1,12 @@
{
description = "Library of low-level helper functions for nix expressions.";

-outputs = { self }:
+outputs =
+{ self }:
let
lib0 = import ./.;
-in {
+in
+{
lib = lib0.extend (import ./flake-version-info.nix self);
};
}

lib/gvariant.nix (133 lines changed)
@@ -14,7 +14,12 @@

let
inherit (lib)
-concatMapStringsSep concatStrings escape head replaceStrings;
+concatMapStringsSep
+concatStrings
+escape
+head
+replaceStrings
+;

mkPrimitive = t: v: {
_type = "gvariant";
@@ -49,7 +54,6 @@ rec {
/**
Check if a value is a GVariant value

-
# Inputs

`v`

@@ -115,7 +119,6 @@ rec {
Returns the GVariant value that most closely matches the given Nix value.
If no GVariant value can be found unambiguously then error is thrown.

-
# Inputs

`v`

@@ -128,7 +131,8 @@ rec {
mkValue :: Any -> gvariant
```
*/
-mkValue = v:
+mkValue =
+v:
if builtins.isBool v then
mkBoolean v
else if builtins.isFloat v then

@@ -141,13 +145,17 @@ rec {
v
else if builtins.isInt v then
let
-validConstructors = builtins.filter ({ min, max, ... }: (min == null || min <= v) && (max == null || v <= max)) intConstructors;
+validConstructors = builtins.filter (
+{ min, max, ... }: (min == null || min <= v) && (max == null || v <= max)
+) intConstructors;
in
throw ''
The GVariant type for number “${builtins.toString v}” is unclear.
Please wrap the value with one of the following, depending on the value type in GSettings schema:

-${lib.concatMapStringsSep "\n" ({ name, type, ...}: "- `lib.gvariant.${name}` for `${type}`") validConstructors}
+${lib.concatMapStringsSep "\n" (
+{ name, type, ... }: "- `lib.gvariant.${name}` for `${type}`"
+) validConstructors}
''
else if builtins.isAttrs v then
throw "Cannot construct GVariant value from an attribute set. If you want to construct a dictionary, you will need to create an array containing items constructed with `lib.gvariant.mkDictionaryEntry`."
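Illustrative aside, not from the commit: the constructors touched in this hunk are typically combined as in the minimal sketch below; the attribute names are made up for illustration.

let
  inherit (lib) gvariant;
in
{
  # Booleans and strings are unambiguous, so mkValue can pick the GVariant type itself
  enable-animations = gvariant.mkValue true;
  picture-uri = gvariant.mkValue "file:///some/wallpaper.png";
  # Bare integers are ambiguous; without an explicit wrapper, mkValue throws the error shown above
  idle-delay = gvariant.mkInt32 300;
}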
@ -157,7 +165,6 @@ rec {
|
|||||||
/**
|
/**
|
||||||
Returns the GVariant array from the given type of the elements and a Nix list.
|
Returns the GVariant array from the given type of the elements and a Nix list.
|
||||||
|
|
||||||
|
|
||||||
# Inputs
|
# Inputs
|
||||||
|
|
||||||
`elems`
|
`elems`
|
||||||
@ -181,22 +188,22 @@ rec {
|
|||||||
|
|
||||||
:::
|
:::
|
||||||
*/
|
*/
|
||||||
mkArray = elems:
|
mkArray =
|
||||||
|
elems:
|
||||||
let
|
let
|
||||||
vs = map mkValue (lib.throwIf (elems == [ ]) "Please create empty array with mkEmptyArray." elems);
|
vs = map mkValue (lib.throwIf (elems == [ ]) "Please create empty array with mkEmptyArray." elems);
|
||||||
elemType = lib.throwIfNot (lib.all (t: (head vs).type == t) (map (v: v.type) vs))
|
elemType = lib.throwIfNot (lib.all (t: (head vs).type == t) (
|
||||||
"Elements in a list should have same type."
|
map (v: v.type) vs
|
||||||
(head vs).type;
|
)) "Elements in a list should have same type." (head vs).type;
|
||||||
in
|
in
|
||||||
mkPrimitive (type.arrayOf elemType) vs // {
|
mkPrimitive (type.arrayOf elemType) vs
|
||||||
__toString = self:
|
// {
|
||||||
"@${self.type} [${concatMapStringsSep "," toString self.value}]";
|
__toString = self: "@${self.type} [${concatMapStringsSep "," toString self.value}]";
|
||||||
};
|
};
|
||||||
|
|
||||||
/**
|
/**
|
||||||
Returns the GVariant array from the given empty Nix list.
|
Returns the GVariant array from the given empty Nix list.
|
||||||
|
|
||||||
|
|
||||||
# Inputs
|
# Inputs
|
||||||
|
|
||||||
`elemType`
|
`elemType`
|
||||||
@ -220,16 +227,17 @@ rec {
|
|||||||
|
|
||||||
:::
|
:::
|
||||||
*/
|
*/
|
||||||
mkEmptyArray = elemType: mkPrimitive (type.arrayOf elemType) [ ] // {
|
mkEmptyArray =
|
||||||
|
elemType:
|
||||||
|
mkPrimitive (type.arrayOf elemType) [ ]
|
||||||
|
// {
|
||||||
__toString = self: "@${self.type} []";
|
__toString = self: "@${self.type} []";
|
||||||
};
|
};
|
||||||
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
Returns the GVariant variant from the given Nix value. Variants are containers
|
Returns the GVariant variant from the given Nix value. Variants are containers
|
||||||
of different GVariant type.
|
of different GVariant type.
|
||||||
|
|
||||||
|
|
||||||
# Inputs
|
# Inputs
|
||||||
|
|
||||||
`elem`
|
`elem`
|
||||||
@ -255,16 +263,19 @@ rec {
|
|||||||
|
|
||||||
:::
|
:::
|
||||||
*/
|
*/
|
||||||
mkVariant = elem:
|
mkVariant =
|
||||||
let gvarElem = mkValue elem;
|
elem:
|
||||||
in mkPrimitive type.variant gvarElem // {
|
let
|
||||||
|
gvarElem = mkValue elem;
|
||||||
|
in
|
||||||
|
mkPrimitive type.variant gvarElem
|
||||||
|
// {
|
||||||
__toString = self: "<${toString self.value}>";
|
__toString = self: "<${toString self.value}>";
|
||||||
};
|
};
|
||||||
|
|
||||||
/**
|
/**
|
||||||
Returns the GVariant dictionary entry from the given key and value.
|
Returns the GVariant dictionary entry from the given key and value.
|
||||||
|
|
||||||
|
|
||||||
# Inputs
|
# Inputs
|
||||||
|
|
||||||
`name`
|
`name`
|
||||||
@ -297,21 +308,20 @@ rec {
|
|||||||
:::
|
:::
|
||||||
*/
|
*/
|
||||||
mkDictionaryEntry =
|
mkDictionaryEntry =
|
||||||
name:
|
name: value:
|
||||||
value:
|
|
||||||
let
|
let
|
||||||
name' = mkValue name;
|
name' = mkValue name;
|
||||||
value' = mkValue value;
|
value' = mkValue value;
|
||||||
dictionaryType = type.dictionaryEntryOf name'.type value'.type;
|
dictionaryType = type.dictionaryEntryOf name'.type value'.type;
|
||||||
in
|
in
|
||||||
mkPrimitive dictionaryType { inherit name value; } // {
|
mkPrimitive dictionaryType { inherit name value; }
|
||||||
|
// {
|
||||||
__toString = self: "@${self.type} {${name'},${value'}}";
|
__toString = self: "@${self.type} {${name'},${value'}}";
|
||||||
};
|
};
|
||||||
|
|
||||||
/**
|
/**
|
||||||
Returns the GVariant maybe from the given element type.
|
Returns the GVariant maybe from the given element type.
|
||||||
|
|
||||||
|
|
||||||
# Inputs
|
# Inputs
|
||||||
|
|
||||||
`elemType`
|
`elemType`
|
||||||
@ -328,19 +338,17 @@ rec {
|
|||||||
mkMaybe :: gvariant.type -> Any -> gvariant
|
mkMaybe :: gvariant.type -> Any -> gvariant
|
||||||
```
|
```
|
||||||
*/
|
*/
|
||||||
mkMaybe = elemType: elem:
|
mkMaybe =
|
||||||
mkPrimitive (type.maybeOf elemType) elem // {
|
elemType: elem:
|
||||||
__toString = self:
|
mkPrimitive (type.maybeOf elemType) elem
|
||||||
if self.value == null then
|
// {
|
||||||
"@${self.type} nothing"
|
__toString =
|
||||||
else
|
self: if self.value == null then "@${self.type} nothing" else "just ${toString self.value}";
|
||||||
"just ${toString self.value}";
|
|
||||||
};
|
};
|
||||||
|
|
||||||
/**
|
/**
|
||||||
Returns the GVariant nothing from the given element type.
|
Returns the GVariant nothing from the given element type.
|
||||||
|
|
||||||
|
|
||||||
# Inputs
|
# Inputs
|
||||||
|
|
||||||
`elemType`
|
`elemType`
|
||||||
@ -358,7 +366,6 @@ rec {
|
|||||||
/**
|
/**
|
||||||
Returns the GVariant just from the given Nix value.
|
Returns the GVariant just from the given Nix value.
|
||||||
|
|
||||||
|
|
||||||
# Inputs
|
# Inputs
|
||||||
|
|
||||||
`elem`
|
`elem`
|
||||||
@ -371,12 +378,16 @@ rec {
|
|||||||
mkJust :: Any -> gvariant
|
mkJust :: Any -> gvariant
|
||||||
```
|
```
|
||||||
*/
|
*/
|
||||||
mkJust = elem: let gvarElem = mkValue elem; in mkMaybe gvarElem.type gvarElem;
|
mkJust =
|
||||||
|
elem:
|
||||||
|
let
|
||||||
|
gvarElem = mkValue elem;
|
||||||
|
in
|
||||||
|
mkMaybe gvarElem.type gvarElem;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
Returns the GVariant tuple from the given Nix list.
|
Returns the GVariant tuple from the given Nix list.
|
||||||
|
|
||||||
|
|
||||||
# Inputs
|
# Inputs
|
||||||
|
|
||||||
`elems`
|
`elems`
|
||||||
@ -389,20 +400,20 @@ rec {
|
|||||||
mkTuple :: [Any] -> gvariant
|
mkTuple :: [Any] -> gvariant
|
||||||
```
|
```
|
||||||
*/
|
*/
|
||||||
mkTuple = elems:
|
mkTuple =
|
||||||
|
elems:
|
||||||
let
|
let
|
||||||
gvarElems = map mkValue elems;
|
gvarElems = map mkValue elems;
|
||||||
tupleType = type.tupleOf (map (e: e.type) gvarElems);
|
tupleType = type.tupleOf (map (e: e.type) gvarElems);
|
||||||
in
|
in
|
||||||
mkPrimitive tupleType gvarElems // {
|
mkPrimitive tupleType gvarElems
|
||||||
__toString = self:
|
// {
|
||||||
"@${self.type} (${concatMapStringsSep "," toString self.value})";
|
__toString = self: "@${self.type} (${concatMapStringsSep "," toString self.value})";
|
||||||
};
|
};
|
||||||
|
|
||||||
/**
|
/**
|
||||||
Returns the GVariant boolean from the given Nix bool value.
|
Returns the GVariant boolean from the given Nix bool value.
|
||||||
|
|
||||||
|
|
||||||
# Inputs
|
# Inputs
|
||||||
|
|
||||||
`v`
|
`v`
|
||||||
@ -415,15 +426,16 @@ rec {
|
|||||||
mkBoolean :: Bool -> gvariant
|
mkBoolean :: Bool -> gvariant
|
||||||
```
|
```
|
||||||
*/
|
*/
|
||||||
mkBoolean = v:
|
mkBoolean =
|
||||||
mkPrimitive type.boolean v // {
|
v:
|
||||||
|
mkPrimitive type.boolean v
|
||||||
|
// {
|
||||||
__toString = self: if self.value then "true" else "false";
|
__toString = self: if self.value then "true" else "false";
|
||||||
};
|
};
|
||||||
|
|
||||||
/**
|
/**
|
||||||
Returns the GVariant string from the given Nix string value.
|
Returns the GVariant string from the given Nix string value.
|
||||||
|
|
||||||
|
|
||||||
# Inputs
|
# Inputs
|
||||||
|
|
||||||
`v`
|
`v`
|
||||||
@ -436,16 +448,19 @@ rec {
|
|||||||
mkString :: String -> gvariant
|
mkString :: String -> gvariant
|
||||||
```
|
```
|
||||||
*/
|
*/
|
||||||
mkString = v:
|
mkString =
|
||||||
let sanitize = s: replaceStrings [ "\n" ] [ "\\n" ] (escape [ "'" "\\" ] s);
|
v:
|
||||||
in mkPrimitive type.string v // {
|
let
|
||||||
|
sanitize = s: replaceStrings [ "\n" ] [ "\\n" ] (escape [ "'" "\\" ] s);
|
||||||
|
in
|
||||||
|
mkPrimitive type.string v
|
||||||
|
// {
|
||||||
__toString = self: "'${sanitize self.value}'";
|
__toString = self: "'${sanitize self.value}'";
|
||||||
};
|
};
|
||||||
|
|
||||||
/**
|
/**
|
||||||
Returns the GVariant object path from the given Nix string value.
|
Returns the GVariant object path from the given Nix string value.
|
||||||
|
|
||||||
|
|
||||||
# Inputs
|
# Inputs
|
||||||
|
|
||||||
`v`
|
`v`
|
||||||
@ -458,8 +473,10 @@ rec {
|
|||||||
mkObjectpath :: String -> gvariant
|
mkObjectpath :: String -> gvariant
|
||||||
```
|
```
|
||||||
*/
|
*/
|
||||||
mkObjectpath = v:
|
mkObjectpath =
|
||||||
mkPrimitive type.string v // {
|
v:
|
||||||
|
mkPrimitive type.string v
|
||||||
|
// {
|
||||||
__toString = self: "objectpath '${escape [ "'" ] self.value}'";
|
__toString = self: "objectpath '${escape [ "'" ] self.value}'";
|
||||||
};
|
};
|
||||||
|
|
||||||
@ -499,7 +516,6 @@ rec {
|
|||||||
/**
|
/**
|
||||||
Returns the GVariant int32 from the given Nix int value.
|
Returns the GVariant int32 from the given Nix int value.
|
||||||
|
|
||||||
|
|
||||||
# Inputs
|
# Inputs
|
||||||
|
|
||||||
`v`
|
`v`
|
||||||
@ -512,8 +528,10 @@ rec {
|
|||||||
mkInt32 :: Int -> gvariant
|
mkInt32 :: Int -> gvariant
|
||||||
```
|
```
|
||||||
*/
|
*/
|
||||||
mkInt32 = v:
|
mkInt32 =
|
||||||
mkPrimitive type.int32 v // {
|
v:
|
||||||
|
mkPrimitive type.int32 v
|
||||||
|
// {
|
||||||
__toString = self: toString self.value;
|
__toString = self: toString self.value;
|
||||||
};
|
};
|
||||||
|
|
||||||
@ -553,7 +571,6 @@ rec {
|
|||||||
/**
|
/**
|
||||||
Returns the GVariant double from the given Nix float value.
|
Returns the GVariant double from the given Nix float value.
|
||||||
|
|
||||||
|
|
||||||
# Inputs
|
# Inputs
|
||||||
|
|
||||||
`v`
|
`v`
|
||||||
@ -566,8 +583,10 @@ rec {
|
|||||||
mkDouble :: Float -> gvariant
|
mkDouble :: Float -> gvariant
|
||||||
```
|
```
|
||||||
*/
|
*/
|
||||||
mkDouble = v:
|
mkDouble =
|
||||||
mkPrimitive type.double v // {
|
v:
|
||||||
|
mkPrimitive type.double v
|
||||||
|
// {
|
||||||
__toString = self: toString self.value;
|
__toString = self: toString self.value;
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
@@ -5,17 +5,29 @@ let
in
{

# Keeping these around in case we decide to change this horrible implementation :)
-option = x:
-x // { optional = true; };
+option = x: x // { optional = true; };

-yes = { tristate = "y"; optional = false; };
-no = { tristate = "n"; optional = false; };
-module = { tristate = "m"; optional = false; };
-unset = { tristate = null; optional = false; };
-freeform = x: { freeform = x; optional = false; };
+yes = {
+tristate = "y";
+optional = false;
+};
+no = {
+tristate = "n";
+optional = false;
+};
+module = {
+tristate = "m";
+optional = false;
+};
+unset = {
+tristate = null;
+optional = false;
+};
+freeform = x: {
+freeform = x;
+optional = false;
+};

# Common patterns/legacy used in common-config/hardened/config.nix
whenHelpers = version: {
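Illustrative aside, not from the commit: the helpers reformatted above are meant to be used in structured kernel config expressions, roughly as in this sketch; the CONFIG_* names are placeholders.

with lib.kernel;
{
  # each attribute maps to a CONFIG_* symbol
  CRYPTO_AES = yes;              # =y
  NFS_FS = module;               # =m
  DEBUG_INFO = no;               # =n
  KEXEC_FILE = option yes;       # `option` marks it as allowed to be absent
  FRAME_WARN = freeform "2048";  # free-form value instead of a tristate
}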
@@ -1,4 +1,4 @@
-/* Functions for working with path values. */
+# Functions for working with path values.
# See ./README.md for internal docs
{ lib }:
let

@@ -41,7 +41,8 @@ let
;

# Return the reason why a subpath is invalid, or `null` if it's valid
-subpathInvalidReason = value:
+subpathInvalidReason =
+value:
if !isString value then
"The given value is of type ${builtins.typeOf value}, but a string was expected"
else if value == "" then

@@ -51,11 +52,13 @@ let
# We don't support ".." components, see ./path.md#parent-directory
else if match "(.*/)?\\.\\.(/.*)?" value != null then
"The given string \"${value}\" contains a `..` component, which is not allowed in subpaths"
-else null;
+else
+null;

# Split and normalise a relative path string into its components.
# Error for ".." components and doesn't include "." components
-splitRelPath = path:
+splitRelPath =
+path:
let
# Split the string into its parts using regex for efficiency. This regex
# matches patterns like "/", "/./", "/././", with arbitrarily many "/"s
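Illustrative aside, not from the commit: subpathInvalidReason backs the public subpath checks, so its rules surface roughly like this (REPL-style sketch, paths are placeholders).

nix-repl> lib.path.subpath.isValid "foo/../bar"
false   # ".." components are rejected, per the rule above

nix-repl> lib.path.append /some/dir "foo/bar"
/some/dir/foo/bar   # append asserts validity of its second argument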
@ -89,21 +92,26 @@ let
|
|||||||
# Special case of a single "." path component. Such a case leaves a
|
# Special case of a single "." path component. Such a case leaves a
|
||||||
# componentCount of -1 due to the skipStart/skipEnd not verifying that
|
# componentCount of -1 due to the skipStart/skipEnd not verifying that
|
||||||
# they don't refer to the same character
|
# they don't refer to the same character
|
||||||
if path == "." then []
|
if path == "." then
|
||||||
|
[ ]
|
||||||
|
|
||||||
# Generate the result list directly. This is more efficient than a
|
# Generate the result list directly. This is more efficient than a
|
||||||
# combination of `filter`, `init` and `tail`, because here we don't
|
# combination of `filter`, `init` and `tail`, because here we don't
|
||||||
# allocate any intermediate lists
|
# allocate any intermediate lists
|
||||||
else genList (index:
|
else
|
||||||
|
genList (
|
||||||
|
index:
|
||||||
# To get to the element we need to add the number of parts we skip and
|
# To get to the element we need to add the number of parts we skip and
|
||||||
# multiply by two due to the interleaved layout of `parts`
|
# multiply by two due to the interleaved layout of `parts`
|
||||||
elemAt parts ((skipStart + index) * 2)
|
elemAt parts ((skipStart + index) * 2)
|
||||||
) componentCount;
|
) componentCount;
|
||||||
|
|
||||||
# Join relative path components together
|
# Join relative path components together
|
||||||
joinRelPath = components:
|
joinRelPath =
|
||||||
|
components:
|
||||||
# Always return relative paths with `./` as a prefix (./path.md#leading-dots-for-relative-paths)
|
# Always return relative paths with `./` as a prefix (./path.md#leading-dots-for-relative-paths)
|
||||||
"./" +
|
"./"
|
||||||
|
+
|
||||||
# An empty string is not a valid relative path, so we need to return a `.` when we have no components
|
# An empty string is not a valid relative path, so we need to return a `.` when we have no components
|
||||||
(if components == [ ] then "." else concatStringsSep "/" components);
|
(if components == [ ] then "." else concatStringsSep "/" components);
|
||||||
|
|
||||||
@ -117,11 +125,18 @@ let
|
|||||||
# because it can distinguish different filesystem roots
|
# because it can distinguish different filesystem roots
|
||||||
deconstructPath =
|
deconstructPath =
|
||||||
let
|
let
|
||||||
recurse = components: base:
|
recurse =
|
||||||
|
components: base:
|
||||||
# If the parent of a path is the path itself, then it's a filesystem root
|
# If the parent of a path is the path itself, then it's a filesystem root
|
||||||
if base == dirOf base then { root = base; inherit components; }
|
if base == dirOf base then
|
||||||
else recurse ([ (baseNameOf base) ] ++ components) (dirOf base);
|
{
|
||||||
in recurse [];
|
root = base;
|
||||||
|
inherit components;
|
||||||
|
}
|
||||||
|
else
|
||||||
|
recurse ([ (baseNameOf base) ] ++ components) (dirOf base);
|
||||||
|
in
|
||||||
|
        recurse [ ];

  # The components of the store directory, typically [ "nix" "store" ]
  storeDirComponents = splitRelPath ("./" + storeDir);

@@ -132,7 +147,8 @@ let
  #
  # Whether path components have a store path as a prefix, according to
  # https://nixos.org/manual/nix/stable/store/store-path.html#store-path.
  componentsHaveStorePathPrefix =
    components:
    # path starts with the store directory (typically /nix/store)
    listHasPrefix storeDirComponents components
    # is not the store directory itself, meaning there's at least one extra component
@@ -145,7 +161,9 @@ let
    # We care more about discerning store path-ness on realistic values. Making it airtight would be fragile and slow.
    && match ".{32}-.+" (elemAt components storeDirLength) != null;

in
# No rec! Add dependencies on this file at the top.
{

  /*
    Append a subpath string to a path.
@@ -194,8 +212,8 @@ in /* No rec! Add dependencies on this file at the top. */ {
    path:
    # The subpath string to append
    subpath:
    assert assertMsg (isPath path)
      ''lib.path.append: The first argument is of type ${builtins.typeOf path}, but a path was expected'';
    assert assertMsg (isValid subpath) ''
      lib.path.append: Second argument is not a valid subpath string:
          ${subpathInvalidReason subpath}'';
@@ -225,25 +243,23 @@ in /* No rec! Add dependencies on this file at the top. */ {
  */
  hasPrefix =
    path1:
    assert assertMsg (isPath path1)
      "lib.path.hasPrefix: First argument is of type ${typeOf path1}, but a path was expected";
    let
      path1Deconstructed = deconstructPath path1;
    in
    path2:
    assert assertMsg (isPath path2)
      "lib.path.hasPrefix: Second argument is of type ${typeOf path2}, but a path was expected";
    let
      path2Deconstructed = deconstructPath path2;
    in
    assert assertMsg (path1Deconstructed.root == path2Deconstructed.root) ''
      lib.path.hasPrefix: Filesystem roots must be the same for both paths, but paths with different roots were given:
          first argument: "${toString path1}" with root "${toString path1Deconstructed.root}"
          second argument: "${toString path2}" with root "${toString path2Deconstructed.root}"'';
    take (length path1Deconstructed.components) path2Deconstructed.components
    == path1Deconstructed.components;

  /*
    Remove the first path as a component-wise prefix from the second path.
@@ -270,16 +286,14 @@ in /* No rec! Add dependencies on this file at the top. */ {
  */
  removePrefix =
    path1:
    assert assertMsg (isPath path1)
      "lib.path.removePrefix: First argument is of type ${typeOf path1}, but a path was expected.";
    let
      path1Deconstructed = deconstructPath path1;
      path1Length = length path1Deconstructed.components;
    in
    path2:
    assert assertMsg (isPath path2)
      "lib.path.removePrefix: Second argument is of type ${typeOf path2}, but a path was expected.";
    let
      path2Deconstructed = deconstructPath path2;
@@ -288,11 +302,9 @@ let
        if success then
          drop path1Length path2Deconstructed.components
        else
          throw ''lib.path.removePrefix: The first path argument "${toString path1}" is not a component-wise prefix of the second path argument "${toString path2}".'';
    in
    assert assertMsg (path1Deconstructed.root == path2Deconstructed.root) ''
      lib.path.removePrefix: Filesystem roots must be the same for both paths, but paths with different roots were given:
          first argument: "${toString path1}" with root "${toString path1Deconstructed.root}"
          second argument: "${toString path2}" with root "${toString path2Deconstructed.root}"'';
@@ -336,12 +348,12 @@ in /* No rec! Add dependencies on this file at the top. */ {
  splitRoot =
    # The path to split the root off of
    path:
    assert assertMsg (isPath path)
      "lib.path.splitRoot: Argument is of type ${typeOf path}, but a path was expected";
    let
      deconstructed = deconstructPath path;
    in
    {
      root = deconstructed.root;
      subpath = joinRelPath deconstructed.components;
    };
@@ -387,12 +399,12 @@ in /* No rec! Add dependencies on this file at the top. */ {
      hasStorePathPrefix /nix/store/nvl9ic0pj1fpyln3zaqrf4cclbqdfn1j-foo.drv
      => true
  */
  hasStorePathPrefix =
    path:
    let
      deconstructed = deconstructPath path;
    in
    assert assertMsg (isPath path)
      "lib.path.hasStorePathPrefix: Argument is of type ${typeOf path}, but a path was expected";
    assert assertMsg
      # This function likely breaks or needs adjustment if used with other filesystem roots, if they ever get implemented.
@@ -446,9 +458,7 @@ in /* No rec! Add dependencies on this file at the top. */ {
  */
  subpath.isValid =
    # The value to check
    value: subpathInvalidReason value == null;

  /*
    Join subpath strings together using `/`, returning a normalised subpath string.
@@ -511,16 +521,18 @@ in /* No rec! Add dependencies on this file at the top. */ {
    # The list of subpaths to join together
    subpaths:
    # Fast in case all paths are valid
    if all isValid subpaths then
      joinRelPath (concatMap splitRelPath subpaths)
    else
      # Otherwise we take our time to gather more info for a better error message
      # Strictly go through each path, throwing on the first invalid one
      # Tracks the list index in the fold accumulator
      foldl' (
        i: path:
        if isValid path then
          i + 1
        else
          throw ''
            lib.path.subpath.join: Element at index ${toString i} is not a valid subpath string:
                ${subpathInvalidReason path}''
      ) 0 subpaths;
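For orientation only (not part of the commit, and only a sketch): the two most-touched `lib.path` helpers above are used as follows, with results matching the doc comments and the unit tests shown further down in this diff. It assumes `<nixpkgs>` is on NIX_PATH.

  let
    lib = import <nixpkgs/lib>;
  in
  {
    # Append a subpath string to a path value.
    appended = lib.path.append /foo "bar/baz";                     # => /foo/bar/baz
    # Join subpath strings into one normalised subpath string.
    joined = lib.path.subpath.join [ "./foo" "." "bar//./baz/" ];  # => "./foo/bar/baz"
  }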
@@ -12,14 +12,18 @@
  seed ? null,
}:

pkgs.runCommand "lib-path-tests"
  {
    nativeBuildInputs =
      [
        nixVersions.stable
      ]
      ++ (with pkgs; [
        jq
        bc
      ]);
  }
  ''
    # Needed to make Nix evaluation work
    export TEST_ROOT=$(pwd)/test-tmp
    export NIX_BUILD_HOOK=
@@ -16,14 +16,15 @@ let
  lib = import libpath;

  # read each file into a string
  strings = map (name: builtins.readFile (dir + "/${name}")) (
    builtins.attrNames (builtins.readDir dir)
  );

  inherit (lib.path.subpath) normalise isValid;
  inherit (lib.asserts) assertMsg;

  normaliseAndCheck =
    str:
    let
      originalValid = isValid str;
@@ -34,27 +35,26 @@ let
      absConcatNormalised = /. + ("/" + tryOnce.value);
    in
    # Check the lib.path.subpath.normalise property to only error on invalid subpaths
    assert assertMsg (
      originalValid -> tryOnce.success
    ) "Even though string \"${str}\" is valid as a subpath, the normalisation for it failed";
    assert assertMsg (
      !originalValid -> !tryOnce.success
    ) "Even though string \"${str}\" is invalid as a subpath, the normalisation for it succeeded";

    # Check normalisation idempotency
    assert assertMsg (
      originalValid -> tryTwice.success
    ) "For valid subpath \"${str}\", the normalisation \"${tryOnce.value}\" was not a valid subpath";
    assert assertMsg (originalValid -> tryOnce.value == tryTwice.value)
      "For valid subpath \"${str}\", normalising it once gives \"${tryOnce.value}\" but normalising it twice gives a different result: \"${tryTwice.value}\"";

    # Check that normalisation doesn't change a string when appended to an absolute Nix path value
    assert assertMsg (originalValid -> absConcatOrig == absConcatNormalised)
      "For valid subpath \"${str}\", appending to an absolute Nix path value gives \"${absConcatOrig}\", but appending the normalised result \"${tryOnce.value}\" gives a different value \"${absConcatNormalised}\"";

    # Return an empty string when failed
    if tryOnce.success then tryOnce.value else "";

in
lib.genAttrs strings normaliseAndCheck
@@ -3,7 +3,14 @@
{ libpath }:
let
  lib = import libpath;
  inherit (lib.path)
    hasPrefix
    removePrefix
    append
    splitRoot
    hasStorePathPrefix
    subpath
    ;

  # This is not allowed generally, but we're in the tests here, so we'll allow ourselves.
  storeDirPath = /. + builtins.storeDir;
@@ -79,15 +86,24 @@ let
  testSplitRootExample1 = {
    expr = splitRoot /foo/bar;
    expected = {
      root = /.;
      subpath = "./foo/bar";
    };
  };
  testSplitRootExample2 = {
    expr = splitRoot /.;
    expected = {
      root = /.;
      subpath = "./.";
    };
  };
  testSplitRootExample3 = {
    expr = splitRoot /foo/../bar;
    expected = {
      root = /.;
      subpath = "./bar";
    };
  };
  testSplitRootExample4 = {
    expr = (builtins.tryEval (splitRoot "/foo/bar")).success;
@@ -111,7 +127,9 @@ let
    expected = false;
  };
  testHasStorePathPrefixExample5 = {
    expr = hasStorePathPrefix (
      storeDirPath + "/.links/10gg8k3rmbw8p7gszarbk7qyd9jwxhcfq9i6s5i0qikx8alkk4hq"
    );
    expected = false;
  };
  testHasStorePathPrefixExample6 = {
@@ -188,11 +206,18 @@ let
  # Test examples from the lib.path.subpath.join documentation
  testSubpathJoinExample1 = {
    expr = subpath.join [
      "foo"
      "bar/baz"
    ];
    expected = "./foo/bar/baz";
  };
  testSubpathJoinExample2 = {
    expr = subpath.join [
      "./foo"
      "."
      "bar//./baz/"
    ];
    expected = "./foo/bar/baz";
  };
  testSubpathJoinExample3 = {
@@ -273,7 +298,11 @@ let
  };
  testSubpathComponentsExample2 = {
    expr = subpath.components "./foo//bar/./baz/";
    expected = [
      "foo"
      "bar"
      "baz"
    ];
  };
  testSubpathComponentsExample3 = {
    expr = (builtins.tryEval (subpath.components "/foo")).success;
@@ -281,5 +310,7 @@ let
  };
};
in
if cases == [ ] then
  "Unit tests successful"
else
  throw "Path unit tests failed: ${lib.generators.toPretty { } cases}"
@@ -5,7 +5,8 @@ let
    shortName = tname;
    isSource = false;
  };
in
lib.mapAttrs (tname: tset: defaultSourceType tname // tset) {

  fromSource = {
    isSource = true;
lib/sources.nix (184 lines changed)
@@ -1,4 +1,4 @@
# Functions for copying sources to the Nix store.
{ lib }:

# Tested in lib/tests/sources.sh
@@ -23,19 +23,31 @@ let
    directories of version control system, backup files (*~)
    and some generated files.
  */
  cleanSourceFilter =
    name: type:
    let
      baseName = baseNameOf (toString name);
    in
    !(
      # Filter out version control software files/directories
      (
        baseName == ".git"
        || type == "directory" && (baseName == ".svn" || baseName == "CVS" || baseName == ".hg")
      )
      ||
      # Filter out editor backup / swap files.
      lib.hasSuffix "~" baseName
      || match "^\\.sw[a-z]$" baseName != null
      || match "^\\..*\\.sw[a-z]$" baseName != null
      ||

      # Filter out generates files.
      lib.hasSuffix ".o" baseName
      || lib.hasSuffix ".so" baseName
      ||
      # Filter out nix-build result symlinks
      (type == "symlink" && lib.hasPrefix "result" baseName)
      ||
      # Filter out sockets and other types of files we can't have in the store.
      (type == "unknown")
    );
@@ -46,7 +58,12 @@ let
    Example:
             cleanSource ./.
  */
  cleanSource =
    src:
    cleanSourceWith {
      filter = cleanSourceFilter;
      inherit src;
    };

  /*
    Like `builtins.filterSource`, except it will compose with itself,
@@ -65,7 +82,6 @@ let
    builtins.filterSource f (builtins.filterSource g ./.)
    # Fails!

  */
  cleanSourceWith =
    {
@@ -80,11 +96,12 @@ let
      filter ? _path: _type: true,
      # Optional name to use as part of the store path.
      # This defaults to `src.name` or otherwise `"source"`.
      name ? null,
    }:
    let
      orig = toSourceAttributes src;
    in
    fromSourceAttributes {
      inherit (orig) origSrc;
      filter = path: type: filter path type && orig.filter path type;
      name = if name != null then name else orig.name;
@@ -102,14 +119,17 @@ let
      attrs = toSourceAttributes src;
    in
    fromSourceAttributes (
      attrs
      // {
        filter =
          path: type:
          let
            r = attrs.filter path type;
          in
          builtins.trace "${attrs.name}.filter ${path} = ${boolToString r}" r;
      }
    )
    // {
      satisfiesSubpathInvariant = src ? satisfiesSubpathInvariant && src.satisfiesSubpathInvariant;
    };
@@ -118,14 +138,20 @@ let
    Example: src = sourceByRegex ./my-subproject [".*\.py$" "^database.sql$"]
  */
  sourceByRegex =
    src: regexes:
    let
      isFiltered = src ? _isLibCleanSourceWith;
      origSrc = if isFiltered then src.origSrc else src;
    in
    lib.cleanSourceWith {
      filter = (
        path: type:
        let
          relPath = lib.removePrefix (toString origSrc + "/") (toString path);
        in
        lib.any (re: match re relPath != null) regexes
      );
      inherit src;
    };
@@ -145,10 +171,15 @@ let
    src:
    # A list of file suffix strings
    exts:
    let
      filter =
        name: type:
        let
          base = baseNameOf (toString name);
        in
        type == "directory" || lib.any (ext: lib.hasSuffix ext base) exts;
    in
    cleanSourceWith { inherit filter src; };

  pathIsGitRepo = path: (_commitIdFromGitRepoOrError path) ? value;
@@ -157,9 +188,12 @@ let
    Example: commitIdFromGitRepo <nixpkgs/.git>
  */
  commitIdFromGitRepo =
    path:
    let
      commitIdOrError = _commitIdFromGitRepoOrError path;
    in
    commitIdOrError.value or (throw commitIdOrError.error);

  # Get the commit id of a git repo.
@@ -168,55 +202,68 @@ let
  # Example: commitIdFromGitRepo <nixpkgs/.git>
  # not exported, used for commitIdFromGitRepo
  _commitIdFromGitRepoOrError =
    let
      readCommitFromFile =
        file: path:
        let
          fileName = path + "/${file}";
          packedRefsName = path + "/packed-refs";
          absolutePath =
            base: path: if lib.hasPrefix "/" path then path else toString (/. + "${base}/${path}");
        in
        if
          pathIsRegularFile path
        # Resolve git worktrees. See gitrepository-layout(5)
        then
          let
            m = match "^gitdir: (.*)$" (lib.fileContents path);
          in
          if m == null then
            { error = "File contains no gitdir reference: " + path; }
          else
            let
              gitDir = absolutePath (dirOf path) (lib.head m);
              commonDir'' =
                if pathIsRegularFile "${gitDir}/commondir" then lib.fileContents "${gitDir}/commondir" else gitDir;
              commonDir' = lib.removeSuffix "/" commonDir'';
              commonDir = absolutePath gitDir commonDir';
              refFile = lib.removePrefix "${commonDir}/" "${gitDir}/${file}";
            in
            readCommitFromFile refFile commonDir

        else if
          pathIsRegularFile fileName
        # Sometimes git stores the commitId directly in the file but
        # sometimes it stores something like: «ref: refs/heads/branch-name»
        then
          let
            fileContent = lib.fileContents fileName;
            matchRef = match "^ref: (.*)$" fileContent;
          in
          if matchRef == null then { value = fileContent; } else readCommitFromFile (lib.head matchRef) path

        else if
          pathIsRegularFile packedRefsName
        # Sometimes, the file isn't there at all and has been packed away in the
        # packed-refs file, so we have to grep through it:
        then
          let
            fileContent = readFile packedRefsName;
            matchRef = match "([a-z0-9]+) ${file}";
            isRef = s: isString s && (matchRef s) != null;
            # there is a bug in libstdc++ leading to stackoverflow for long strings:
            # https://github.com/NixOS/nix/issues/2147#issuecomment-659868795
            refs = filter isRef (split "\n" fileContent);
          in
          if refs == [ ] then
            { error = "Could not find " + file + " in " + packedRefsName; }
          else
            { value = lib.head (matchRef (lib.head refs)); }

        else
          { error = "Not a .git directory: " + toString path; };
    in
    readCommitFromFile "HEAD";

  pathHasContext = builtins.hasContext or (lib.hasPrefix storeDir);
@@ -233,7 +280,8 @@ let
  # like class of objects in the wild.
  # (Existing ones being: paths, strings, sources and x//{outPath})
  # So instead of exposing internals, we build a library of combinator functions.
  toSourceAttributes =
    src:
    let
      isFiltered = src ? _isLibCleanSourceWith;
    in
@@ -247,24 +295,36 @@ let
  # fromSourceAttributes : SourceAttrs -> Source
  #
  # Inverse of toSourceAttributes for Source objects.
  fromSourceAttributes =
    {
      origSrc,
      filter,
      name,
    }:
    {
      _isLibCleanSourceWith = true;
      inherit origSrc filter name;
      outPath = builtins.path {
        inherit filter name;
        path = origSrc;
      };
    };

in
{

  pathType =
    lib.warnIf (lib.oldestSupportedReleaseIsAtLeast 2305)
      "lib.sources.pathType has been moved to lib.filesystem.pathType."
      lib.filesystem.pathType;

  pathIsDirectory =
    lib.warnIf (lib.oldestSupportedReleaseIsAtLeast 2305)
      "lib.sources.pathIsDirectory has been moved to lib.filesystem.pathIsDirectory."
      lib.filesystem.pathIsDirectory;

  pathIsRegularFile =
    lib.warnIf (lib.oldestSupportedReleaseIsAtLeast 2305)
      "lib.sources.pathIsRegularFile has been moved to lib.filesystem.pathIsRegularFile."
      lib.filesystem.pathIsRegularFile;
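As a quick usage sketch for the `lib.sources` functions reformatted above (taken from the examples in their own doc comments, not new behaviour introduced by this commit; `./my-subproject` is the hypothetical path from that doc comment):

  let
    lib = import <nixpkgs/lib>;  # assumes <nixpkgs> is on NIX_PATH
  in
  {
    # Copy the current directory to the store, minus VCS, editor and nix-build leftovers.
    src = lib.sources.cleanSource ./.;
    # Keep only files matching the given regexes, relative to the source root.
    pySrc = lib.sources.sourceByRegex ./my-subproject [ ".*\.py$" "^database.sql$" ];
  }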
@@ -491,12 +491,42 @@ rec {
  };

  # can execute on 32bit chip
  gcc_mips32r2_o32 = {
    gcc = {
      arch = "mips32r2";
      abi = "32";
    };
  };
  gcc_mips32r6_o32 = {
    gcc = {
      arch = "mips32r6";
      abi = "32";
    };
  };
  gcc_mips64r2_n32 = {
    gcc = {
      arch = "mips64r2";
      abi = "n32";
    };
  };
  gcc_mips64r6_n32 = {
    gcc = {
      arch = "mips64r6";
      abi = "n32";
    };
  };
  gcc_mips64r2_64 = {
    gcc = {
      arch = "mips64r2";
      abi = "64";
    };
  };
  gcc_mips64r6_64 = {
    gcc = {
      arch = "mips64r6";
      abi = "64";
    };
  };

  # based on:
  # https://www.mail-archive.com/qemu-discuss@nongnu.org/msg05179.html
@@ -545,27 +575,38 @@ rec {
  # This function takes a minimally-valid "platform" and returns an
  # attrset containing zero or more additional attrs which should be
  # included in the platform in order to further elaborate it.
  select =
    platform:
    # x86
    if platform.isx86 then
      pc

    # ARM
    else if platform.isAarch32 then
      let
        version = platform.parsed.cpu.version or null;
      in
      if version == null then
        pc
      else if lib.versionOlder version "6" then
        sheevaplug
      else if lib.versionOlder version "7" then
        raspberrypi
      else
        armv7l-hf-multiplatform

    else if platform.isAarch64 then
      if platform.isDarwin then apple-m1 else aarch64-multiplatform

    else if platform.isRiscV then
      riscv-multiplatform

    else if platform.parsed.cpu == lib.systems.parse.cpuTypes.mipsel then
      (import ./examples.nix { inherit lib; }).mipsel-linux-gnu

    else if platform.parsed.cpu == lib.systems.parse.cpuTypes.powerpc64le then
      powernv

    else
      { };
}
@@ -1,7 +1,10 @@
# Throws an error if any of our lib tests fail.

let
  tests = [
    "misc"
    "systems"
  ];
  all = builtins.concatLists (map (f: import (./. + "/${f}.nix")) tests);
in
if all == [ ] then null else throw (builtins.toJSON all)
@@ -1,7 +1,8 @@
{ lib, ... }:
let
  inherit (lib) types;
in
{
  options = {
    name = lib.mkOption {
      type = types.str;
@@ -23,9 +24,11 @@ in {
      default = null;
    };
    keys = lib.mkOption {
      type = types.listOf (
        types.submodule {
          options.fingerprint = lib.mkOption { type = types.str; };
        }
      );
      default = [ ];
    };
  };
@@ -1,16 +1,23 @@
# to run these tests (and the others)
# nix-build nixpkgs/lib/tests/release.nix
# These tests should stay in sync with the comment in maintainers/maintainers-list.nix
{
  # The pkgs used for dependencies for the testing itself
  pkgs ? import ../.. { },
  lib ? pkgs.lib,
}:

let
  checkMaintainer =
    handle: uncheckedAttrs:
    let
      prefix = [
        "lib"
        "maintainers"
        handle
      ];
      checkedAttrs =
        (lib.modules.evalModules {
          inherit prefix;
          modules = [
            ./maintainer-module.nix
@@ -21,7 +28,8 @@ let
          ];
        }).config;

      checks =
        lib.optional (checkedAttrs.github != null && checkedAttrs.githubId == null) ''
          echo ${lib.escapeShellArg (lib.showOption prefix)}': If `github` is specified, `githubId` must be too.'
          # Calling this too often would hit non-authenticated API limits, but this
          # shouldn't happen since such errors will get fixed rather quickly
@@ -29,25 +37,40 @@ let
          id=$(jq -r '.id' <<< "$info")
          echo "The GitHub ID for GitHub user ${checkedAttrs.github} is $id:"
          echo -e "    githubId = $id;\n"
        ''
        ++
          lib.optional
            (checkedAttrs.email == null && checkedAttrs.github == null && checkedAttrs.matrix == null)
            ''
              echo ${lib.escapeShellArg (lib.showOption prefix)}': At least one of `email`, `github` or `matrix` must be specified, so that users know how to reach you.'
            ''
        ++
          lib.optional (checkedAttrs.email != null && lib.hasSuffix "noreply.github.com" checkedAttrs.email)
            ''
              echo ${lib.escapeShellArg (lib.showOption prefix)}': If an email address is given, it should allow people to reach you. If you do not want that, you can just provide `github` or `matrix` instead.'
            '';
    in
    lib.deepSeq checkedAttrs checks;

  missingGithubIds = lib.concatLists (lib.mapAttrsToList checkMaintainer lib.maintainers);

  success = pkgs.runCommand "checked-maintainers-success" { } ">$out";

  failure =
    pkgs.runCommand "checked-maintainers-failure"
      {
        nativeBuildInputs = [
          pkgs.curl
          pkgs.jq
        ];
        outputHash = "sha256:${lib.fakeSha256}";
        outputHAlgo = "sha256";
        outputHashMode = "flat";
        SSL_CERT_FILE = "${pkgs.cacert}/etc/ssl/certs/ca-bundle.crt";
      }
      ''
        ${lib.concatStringsSep "\n" missingGithubIds}
        exit 1
      '';
in
if missingGithubIds == [ ] then success else failure
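For context, the checks above run over `lib.maintainers` entries of roughly this shape (a hypothetical entry; only the field names come from the maintainer module and checks shown above, the handle and values are made up):

  {
    alice = {
      name = "Alice Example";
      email = "alice@example.org";
      github = "alice";
      githubId = 123456;
    };
  }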
@@ -1,5 +1,9 @@
{ lib, ... }:
{
  options.dummy = lib.mkOption {
    type = lib.types.anything;
    default = { };
  };
  freeformType =
    let
      a = lib.types.attrsOf (lib.types.submodule { options.bar = lib.mkOption { }; });
@@ -7,7 +11,8 @@
    # modifying types like this breaks type merging.
    # This test makes sure that type merging is not performed when only a single declaration exists.
    # Don't modify types in practice!
    a
    // {
      merge = loc: defs: { freeformItems = a.merge loc defs; };
    };
  config.foo.bar = "ok";
@@ -46,14 +46,16 @@ in
    # Disable the aliased option with a high priority so it
    # should override the next import.
    (
      { config, lib, ... }:
      {
        enableAlias = mkForce false;
      }
    )

    # Enable the normal (non-aliased) option.
    (
      { config, lib, ... }:
      {
        enable = true;
      }
@@ -46,14 +46,16 @@ in
    # Disable the aliased option, but with a default (low) priority so it
    # should be able to be overridden by the next import.
    (
      { config, lib, ... }:
      {
        enableAlias = mkDefault false;
      }
    )

    # Enable the normal (non-aliased) option.
    (
      { config, lib, ... }:
      {
        enable = true;
      }
@@ -1,4 +1,5 @@
{ lib, config, ... }:
{
  options.conditionalWorks = lib.mkOption {
    default = !config.value ? foo;
  };
@@ -1,4 +1,5 @@
{ lib, config, ... }:
{
  options.isLazy = lib.mkOption {
    default = !config.value ? foo;
  };
@@ -1,14 +1,26 @@
{ lib, ... }:
{

  options.value = lib.mkOption {
    type = lib.types.lazyAttrsOf lib.types.boolByOr;
  };

  config.value = {
    falseFalse = lib.mkMerge [
      false
      false
    ];
    trueFalse = lib.mkMerge [
      true
      false
    ];
    falseTrue = lib.mkMerge [
      false
      true
    ];
    trueTrue = lib.mkMerge [
      true
      true
    ];
  };
}

@@ -1,4 +1,5 @@
{ lib, ... }:
{
  options = {
    sub = {
      nixosOk = lib.mkOption {
@@ -40,16 +41,14 @@
  ];
  config = {
    _module.freeformType = lib.types.anything;
    ok = lib.evalModules {
      class = "nixos";
      modules = [
        ./module-class-is-nixos.nix
      ];
    };

    fail = lib.evalModules {
      class = "nixos";
      modules = [
        ./module-class-is-nixos.nix
@@ -57,12 +56,12 @@
      ];
    };

    fail-anon = lib.evalModules {
      class = "nixos";
      modules = [
        ./module-class-is-nixos.nix
        {
          _file = "foo.nix#darwinModules.default";
          _class = "darwin";
          config = { };
          imports = [ ];
@@ -70,7 +69,11 @@
      ];
    };

    sub.nixosOk = {
      _class = "nixos";
    };
    sub.nixosFail = {
      imports = [ ./module-class-is-darwin.nix ];
    };
  };
}
@@ -1,8 +1,8 @@
{ lib, ... }:
let
  deathtrapArgs = lib.mapAttrs (
    k: _: throw "The module system is too strict, accessing an unused option's ${k} mkOption-attribute."
  ) (lib.functionArgs lib.mkOption);
in
{
  options.value = lib.mkOption {
@@ -1,7 +1,9 @@
{ lib, ... }:

let
  submod =
    { ... }:
    {
      options = {
        enable = lib.mkOption {
          default = false;
@@ -1,4 +1,5 @@
{ lib, ... }:
{
  options.value = lib.mkOption {
    type = lib.types.either lib.types.int lib.types.str;
  };
@@ -1,4 +1,5 @@
{ lib, ... }:
{
  options.value = lib.mkOption {
    type = lib.types.lazyAttrsOf (lib.types.str // { emptyValue.value = "empty"; });
    default = { };
@@ -1,9 +1,11 @@
{ lib, ... }:
let
  pkgs.hello = {
    type = "derivation";
    pname = "hello";
  };
in
{
  options = {
    package = lib.mkPackageOption pkgs "hello" { };
@@ -46,8 +48,14 @@ in {
      pkgsText = "myPkgs";
    };

    packageFromOtherSet =
      let
        myPkgs = {
          hello = pkgs.hello // {
            pname = "hello-other";
          };
        };
      in
      lib.mkPackageOption myPkgs "hello" { };
  };
}
@@ -1,4 +1,5 @@
{ lib, ... }:
{
  options.value = lib.mkOption {
    type = lib.types.oneOf [
      lib.types.int
@@ -3,7 +3,9 @@
{
  options.set = lib.mkOption {
    default = { };
    example = {
      a = 1;
    };
    type = lib.types.attrsOf lib.types.int;
    description = ''
      Some descriptive text
@@ -1,6 +1,8 @@
{ lib, ... }:
{
  options.submodule = lib.mkOption {
    inherit
      (lib.evalModules {
        modules = [
          {
            options.inner = lib.mkOption {
@@ -9,17 +11,22 @@
            };
          }
        ];
      })
      type
      ;
    default = { };
  };

  config.submodule = lib.mkMerge [
    (
      { lib, ... }:
      {
        options.outer = lib.mkOption {
          type = lib.types.bool;
          default = false;
        };
      }
    )
    {
      inner = true;
      outer = true;
@@ -1,4 +1,5 @@
{ lib, ... }:
{
  options.submodule = lib.mkOption {
    type = lib.types.submoduleWith {
      modules = [
@@ -14,12 +15,15 @@
  };

  config.submodule = lib.mkMerge [
    (
      { lib, ... }:
      {
        options.outer = lib.mkOption {
          type = lib.types.bool;
          default = false;
        };
      }
    )
    {
      inner = true;
      outer = true;
@@ -1,9 +1,11 @@
{ lib, ... }:
let
  sub.options.config = lib.mkOption {
    type = lib.types.bool;
    default = false;
  };
in
{
  options.submodule = lib.mkOption {
    type = lib.types.submoduleWith {
      modules = [ sub ];
@@ -1,4 +1,5 @@
{ lib, ... }:
{
  options.submodule = lib.mkOption {
    type = lib.types.submoduleWith {
      modules = [
@@ -1,9 +1,11 @@
{ lib, ... }:
let
  sub.options.config = lib.mkOption {
    type = lib.types.bool;
    default = false;
  };
in
{
  options.submodule = lib.mkOption {
    type = lib.types.submoduleWith {
      modules = [ sub ];
@@ -1,12 +1,16 @@
{ lib, ... }:
{
  options.submodule = lib.mkOption {
    type = lib.types.submoduleWith {
      modules = [
        (
          { lib, ... }:
          {
            options.foo = lib.mkOption {
              default = lib.foo;
            };
          }
        )
      ];
      specialArgs.lib = lib // {
        foo = "foo";
@@ -1,5 +1,6 @@
{ lib, moduleType, ... }:
let
  inherit (lib) mkOption types;
in
{
  options.variants = mkOption {
@@ -1,8 +1,15 @@
{
  lib ? import ../..,
  modules ? [ ],
}:

{
  inherit
    (lib.evalModules {
      inherit modules;
      specialArgs.modulesPath = ./.;
    })
    config
    options
    ;
}
@@ -1,7 +1,19 @@
{ config, lib, ... }:
let
  inherit (lib)
    types
    mkOption
    setDefaultModuleLocation
    evalModules
    ;
  inherit (types)
    deferredModule
    lazyAttrsOf
    submodule
    str
    raw
    enum
    ;
in
{
  options = {
@@ -13,7 +25,8 @@ in
    };
  };
  config = {
    deferred =
      { ... }:
      # this should be an attrset, so this fails
      true;
  };
@@ -1,7 +1,14 @@
{ lib, ... }:
let
  inherit (lib) types mkOption setDefaultModuleLocation;
  inherit (types)
    deferredModule
    lazyAttrsOf
    submodule
    str
    raw
    enum
    ;
in
{
  imports = [
@@ -9,10 +16,14 @@ in
    # - nodes.<name>
    # - default
    # where all nodes include the default
    (
      { config, ... }:
      {
        _file = "generic.nix";
        options.nodes = mkOption {
          type = lazyAttrsOf (submodule {
            imports = [ config.default ];
          });
          default = { };
        };
        options.default = mkOption {
@@ -22,12 +33,18 @@ in
            Module that is included in all nodes.
          '';
        };
      }
    )

    {
      _file = "default-1.nix";
      default =
        { config, ... }:
        {
          options.settingsDict = lib.mkOption {
            type = lazyAttrsOf str;
            default = { };
          };
          options.bottom = lib.mkOption { type = enum [ ]; };
        };
    }
@@ -49,7 +66,9 @@ in
    {
      _file = "nodes-foo-c-is-a.nix";
      nodes.foo =
        { config, ... }:
        {
          settingsDict.c = config.settingsDict.a;
        };
    }
@@ -1,15 +1,24 @@
{ config, ... }:
{
  class = {
    "just" = "data";
  };
  a = "one";
  b = "two";
  meta = "meta";

  _module.args.result =
    let
      r = builtins.removeAttrs config [ "_module" ];
    in
    builtins.trace (builtins.deepSeq r r) (
      r == {
        a = "one";
        b = "two";
        class = {
          "just" = "data";
        };
        meta = "meta";
      }
    );
}
@@ -5,7 +5,8 @@
{

  # Always defined, but the value depends on the presence of an option.
  config.set =
    {
      value = if options ? set.enable then 360 else 7;
    }
    # Only define if possible.
@@ -5,7 +5,8 @@
{

  # Always defined, but the value depends on the presence of an option.
  config =
    {
      value = if options ? enable then 360 else 7;
    }
    # Only define if possible.
@@ -1,3 +1,4 @@
{ config, ... }:
{
  settingsDict.a = config.settingsDict.b;
}
@@ -1,8 +1,11 @@
{ lib, ... }:
{

  imports = [
    {
      value = lib.mkDefault "def";
    }
  ];

  value = lib.mkMerge [
    (lib.mkIf false "nope")
@@ -1,5 +1,6 @@
{ config, lib, ... }:
let inherit (lib) types mkOption attrNames
|
let
|
||||||
|
inherit (lib) types mkOption attrNames;
|
||||||
in
|
in
|
||||||
{
|
{
|
||||||
options = {
|
options = {
|
||||||
@ -16,7 +17,11 @@ in
|
|||||||
variants.foo.variants.bar.attrs.z = 1;
|
variants.foo.variants.bar.attrs.z = 1;
|
||||||
variants.foo.variants.foo.attrs.c = 3;
|
variants.foo.variants.foo.attrs.c = 3;
|
||||||
resultFoo = lib.concatMapStringsSep " " toString (attrNames config.variants.foo.attrs);
|
resultFoo = lib.concatMapStringsSep " " toString (attrNames config.variants.foo.attrs);
|
||||||
resultFooBar = lib.concatMapStringsSep " " toString (attrNames config.variants.foo.variants.bar.attrs);
|
resultFooBar = lib.concatMapStringsSep " " toString (
|
||||||
resultFooFoo = lib.concatMapStringsSep " " toString (attrNames config.variants.foo.variants.foo.attrs);
|
attrNames config.variants.foo.variants.bar.attrs
|
||||||
|
);
|
||||||
|
resultFooFoo = lib.concatMapStringsSep " " toString (
|
||||||
|
attrNames config.variants.foo.variants.foo.attrs
|
||||||
|
);
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
@ -1,5 +1,8 @@
|
|||||||
{ ... }:
|
{ ... }:
|
||||||
|
|
||||||
{
|
{
|
||||||
disabledModules = [ "define-enable.nix" "declare-enable.nix" ];
|
disabledModules = [
|
||||||
|
"define-enable.nix"
|
||||||
|
"declare-enable.nix"
|
||||||
|
];
|
||||||
}
|
}
|
||||||
|
@ -2,7 +2,9 @@
|
|||||||
let
|
let
|
||||||
inherit (lib) mkOption types;
|
inherit (lib) mkOption types;
|
||||||
|
|
||||||
moduleWithKey = { config, ... }: {
|
moduleWithKey =
|
||||||
|
{ config, ... }:
|
||||||
|
{
|
||||||
config = {
|
config = {
|
||||||
enable = true;
|
enable = true;
|
||||||
};
|
};
|
||||||
|
@ -1,6 +1,20 @@
|
|||||||
{ lib, ... }: {
|
{ lib, ... }:
|
||||||
|
{
|
||||||
imports = [
|
imports = [
|
||||||
(lib.doRename { from = ["a" "b"]; to = ["c" "d" "e"]; warn = true; use = x: x; visible = true; })
|
(lib.doRename {
|
||||||
|
from = [
|
||||||
|
"a"
|
||||||
|
"b"
|
||||||
|
];
|
||||||
|
to = [
|
||||||
|
"c"
|
||||||
|
"d"
|
||||||
|
"e"
|
||||||
|
];
|
||||||
|
warn = true;
|
||||||
|
use = x: x;
|
||||||
|
visible = true;
|
||||||
|
})
|
||||||
];
|
];
|
||||||
options = {
|
options = {
|
||||||
c.d.e = lib.mkOption { };
|
c.d.e = lib.mkOption { };
|
||||||
|
@ -4,7 +4,12 @@
|
|||||||
services.foo.enable = true;
|
services.foo.enable = true;
|
||||||
services.foo.bar = "baz";
|
services.foo.bar = "baz";
|
||||||
result =
|
result =
|
||||||
assert config.services.foos == { "" = { bar = "baz"; }; };
|
assert
|
||||||
|
config.services.foos == {
|
||||||
|
"" = {
|
||||||
|
bar = "baz";
|
||||||
|
};
|
||||||
|
};
|
||||||
true;
|
true;
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
@ -3,7 +3,12 @@
|
|||||||
config = {
|
config = {
|
||||||
services.foos."".bar = "baz";
|
services.foos."".bar = "baz";
|
||||||
result =
|
result =
|
||||||
assert config.services.foos == { "" = { bar = "baz"; }; };
|
assert
|
||||||
|
config.services.foos == {
|
||||||
|
"" = {
|
||||||
|
bar = "baz";
|
||||||
|
};
|
||||||
|
};
|
||||||
assert config.services.foo.bar == "baz";
|
assert config.services.foo.bar == "baz";
|
||||||
true;
|
true;
|
||||||
};
|
};
|
||||||
|
@ -13,25 +13,41 @@
|
|||||||
*/
|
*/
|
||||||
{ config, lib, ... }:
|
{ config, lib, ... }:
|
||||||
let
|
let
|
||||||
inherit (lib) mkOption mkEnableOption types doRename;
|
inherit (lib)
|
||||||
|
mkOption
|
||||||
|
mkEnableOption
|
||||||
|
types
|
||||||
|
doRename
|
||||||
|
;
|
||||||
in
|
in
|
||||||
{
|
{
|
||||||
options = {
|
options = {
|
||||||
services.foo.enable = mkEnableOption "foo";
|
services.foo.enable = mkEnableOption "foo";
|
||||||
services.foos = mkOption {
|
services.foos = mkOption {
|
||||||
type = types.attrsOf (types.submodule {
|
type = types.attrsOf (
|
||||||
|
types.submodule {
|
||||||
options = {
|
options = {
|
||||||
bar = mkOption { type = types.str; };
|
bar = mkOption { type = types.str; };
|
||||||
};
|
};
|
||||||
});
|
}
|
||||||
|
);
|
||||||
default = { };
|
default = { };
|
||||||
};
|
};
|
||||||
result = mkOption { };
|
result = mkOption { };
|
||||||
};
|
};
|
||||||
imports = [
|
imports = [
|
||||||
(doRename {
|
(doRename {
|
||||||
from = [ "services" "foo" "bar" ];
|
from = [
|
||||||
to = [ "services" "foos" "" "bar" ];
|
"services"
|
||||||
|
"foo"
|
||||||
|
"bar"
|
||||||
|
];
|
||||||
|
to = [
|
||||||
|
"services"
|
||||||
|
"foos"
|
||||||
|
""
|
||||||
|
"bar"
|
||||||
|
];
|
||||||
visible = true;
|
visible = true;
|
||||||
warn = false;
|
warn = false;
|
||||||
use = x: x;
|
use = x: x;
|
||||||
|
@ -1,6 +1,20 @@
|
|||||||
{ lib, config, ... }: {
|
{ lib, config, ... }:
|
||||||
|
{
|
||||||
imports = [
|
imports = [
|
||||||
(lib.doRename { from = ["a" "b"]; to = ["c" "d" "e"]; warn = true; use = x: x; visible = true; })
|
(lib.doRename {
|
||||||
|
from = [
|
||||||
|
"a"
|
||||||
|
"b"
|
||||||
|
];
|
||||||
|
to = [
|
||||||
|
"c"
|
||||||
|
"d"
|
||||||
|
"e"
|
||||||
|
];
|
||||||
|
warn = true;
|
||||||
|
use = x: x;
|
||||||
|
visible = true;
|
||||||
|
})
|
||||||
];
|
];
|
||||||
options = {
|
options = {
|
||||||
warnings = lib.mkOption { type = lib.types.listOf lib.types.str; };
|
warnings = lib.mkOption { type = lib.types.listOf lib.types.str; };
|
||||||
|
@ -24,18 +24,12 @@ in
|
|||||||
All options to be rendered, without any visibility filtering applied.
|
All options to be rendered, without any visibility filtering applied.
|
||||||
'';
|
'';
|
||||||
};
|
};
|
||||||
config.docs =
|
config.docs = lib.zipAttrsWith (
|
||||||
lib.zipAttrsWith
|
name: values:
|
||||||
(name: values:
|
|
||||||
if length values > 1 then
|
if length values > 1 then
|
||||||
traceListSeq values
|
traceListSeq values abort "Multiple options with the same name: ${name}"
|
||||||
abort "Multiple options with the same name: ${name}"
|
|
||||||
else
|
else
|
||||||
assert length values == 1;
|
assert length values == 1;
|
||||||
head values
|
head values
|
||||||
)
|
) (map (opt: { ${opt.name} = opt; }) (lib.optionAttrSetToDocList options));
|
||||||
(map
|
|
||||||
(opt: { ${opt.name} = opt; })
|
|
||||||
(lib.optionAttrSetToDocList options)
|
|
||||||
);
|
|
||||||
}
|
}
|
||||||
|
@ -1,7 +1,8 @@
|
|||||||
{ lib, ... }:
|
{ lib, ... }:
|
||||||
let
|
let
|
||||||
inherit (lib) types;
|
inherit (lib) types;
|
||||||
in {
|
in
|
||||||
|
{
|
||||||
|
|
||||||
options = {
|
options = {
|
||||||
int = lib.mkOption {
|
int = lib.mkOption {
|
||||||
|
@ -1,6 +1,7 @@
|
|||||||
{ lib
|
{
|
||||||
, extendModules
|
lib,
|
||||||
, ...
|
extendModules,
|
||||||
|
...
|
||||||
}:
|
}:
|
||||||
|
|
||||||
let
|
let
|
||||||
@ -17,9 +18,10 @@ in
|
|||||||
options.sub = mkOption {
|
options.sub = mkOption {
|
||||||
default = { };
|
default = { };
|
||||||
type = types.submodule (
|
type = types.submodule (
|
||||||
{ config
|
{
|
||||||
, extendModules
|
config,
|
||||||
, ...
|
extendModules,
|
||||||
|
...
|
||||||
}:
|
}:
|
||||||
{
|
{
|
||||||
options.value = mkOption {
|
options.value = mkOption {
|
||||||
@ -30,11 +32,14 @@ in
|
|||||||
default = { };
|
default = { };
|
||||||
inherit
|
inherit
|
||||||
(extendModules {
|
(extendModules {
|
||||||
modules = [{
|
modules = [
|
||||||
|
{
|
||||||
specialisation = mkOverride 0 { };
|
specialisation = mkOverride 0 { };
|
||||||
}];
|
}
|
||||||
|
];
|
||||||
})
|
})
|
||||||
type;
|
type
|
||||||
|
;
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
);
|
);
|
||||||
@ -43,6 +48,5 @@ in
|
|||||||
|
|
||||||
{ config.sub.value = 1; }
|
{ config.sub.value = 1; }
|
||||||
|
|
||||||
|
|
||||||
];
|
];
|
||||||
}
|
}
|
||||||
|
@ -1,3 +1,4 @@
|
|||||||
{ lib, ... }: {
|
{ lib, ... }:
|
||||||
|
{
|
||||||
freeformType = with lib.types; attrsOf (either str (attrsOf str));
|
freeformType = with lib.types; attrsOf (either str (attrsOf str));
|
||||||
}
|
}
|
||||||
|
@ -1,3 +1,4 @@
|
|||||||
{ lib, ... }: {
|
{ lib, ... }:
|
||||||
|
{
|
||||||
freeformType = with lib.types; lazyAttrsOf (either str (lazyAttrsOf str));
|
freeformType = with lib.types; lazyAttrsOf (either str (lazyAttrsOf str));
|
||||||
}
|
}
|
||||||
|
@ -1,8 +1,8 @@
|
|||||||
{ lib, ... }:
|
{ lib, ... }:
|
||||||
let
|
let
|
||||||
deathtrapArgs = lib.mapAttrs
|
deathtrapArgs = lib.mapAttrs (
|
||||||
(k: _: throw "The module system is too strict, accessing an unused option's ${k} mkOption-attribute.")
|
k: _: throw "The module system is too strict, accessing an unused option's ${k} mkOption-attribute."
|
||||||
(lib.functionArgs lib.mkOption);
|
) (lib.functionArgs lib.mkOption);
|
||||||
in
|
in
|
||||||
{
|
{
|
||||||
options.nest.foo = lib.mkOption {
|
options.nest.foo = lib.mkOption {
|
||||||
|
@ -1,4 +1,5 @@
|
|||||||
{ lib, config, ... }: {
|
{ lib, config, ... }:
|
||||||
|
{
|
||||||
options.foo = lib.mkOption {
|
options.foo = lib.mkOption {
|
||||||
type = lib.types.nullOr lib.types.str;
|
type = lib.types.nullOr lib.types.str;
|
||||||
default = null;
|
default = null;
|
||||||
|
@ -1,4 +1,6 @@
|
|||||||
{ lib, options, ... }: with lib.types; {
|
{ lib, options, ... }:
|
||||||
|
with lib.types;
|
||||||
|
{
|
||||||
|
|
||||||
options.fooDeclarations = lib.mkOption {
|
options.fooDeclarations = lib.mkOption {
|
||||||
default = (options.free.type.getSubOptions [ ])._freeformOptions.foo.declarations;
|
default = (options.free.type.getSubOptions [ ])._freeformOptions.foo.declarations;
|
||||||
|
@ -1,4 +1,5 @@
|
|||||||
{ lib, config, ... }: {
|
{ lib, config, ... }:
|
||||||
|
{
|
||||||
options.value = lib.mkOption {
|
options.value = lib.mkOption {
|
||||||
type = lib.types.nullOr lib.types.str;
|
type = lib.types.nullOr lib.types.str;
|
||||||
default = null;
|
default = null;
|
||||||
|
@ -1,8 +1,8 @@
|
|||||||
|
|
||||||
{ lib, config, ... }:
|
{ lib, config, ... }:
|
||||||
let
|
let
|
||||||
inherit (lib) types;
|
inherit (lib) types;
|
||||||
in {
|
in
|
||||||
|
{
|
||||||
options = {
|
options = {
|
||||||
fun = lib.mkOption {
|
fun = lib.mkOption {
|
||||||
type = types.functionTo (types.listOf types.str);
|
type = types.functionTo (types.listOf types.str);
|
||||||
@ -10,11 +10,13 @@ in {
|
|||||||
|
|
||||||
result = lib.mkOption {
|
result = lib.mkOption {
|
||||||
type = types.str;
|
type = types.str;
|
||||||
default = toString (config.fun {
|
default = toString (
|
||||||
|
config.fun {
|
||||||
a = "a";
|
a = "a";
|
||||||
b = "b";
|
b = "b";
|
||||||
c = "c";
|
c = "c";
|
||||||
});
|
}
|
||||||
|
);
|
||||||
};
|
};
|
||||||
};
|
};
|
||||||
|
|
||||||
|
@ -1,7 +1,8 @@
|
|||||||
{ lib, config, ... }:
|
{ lib, config, ... }:
|
||||||
let
|
let
|
||||||
inherit (lib) types;
|
inherit (lib) types;
|
||||||
in {
|
in
|
||||||
|
{
|
||||||
options = {
|
options = {
|
||||||
fun = lib.mkOption {
|
fun = lib.mkOption {
|
||||||
type = types.functionTo (types.attrsOf types.str);
|
type = types.functionTo (types.attrsOf types.str);
|
||||||
@ -9,11 +10,15 @@ in {
|
|||||||
|
|
||||||
result = lib.mkOption {
|
result = lib.mkOption {
|
||||||
type = types.str;
|
type = types.str;
|
||||||
default = toString (lib.attrValues (config.fun {
|
default = toString (
|
||||||
|
lib.attrValues (
|
||||||
|
config.fun {
|
||||||
a = "a";
|
a = "a";
|
||||||
b = "b";
|
b = "b";
|
||||||
c = "c";
|
c = "c";
|
||||||
}));
|
}
|
||||||
|
)
|
||||||
|
);
|
||||||
};
|
};
|
||||||
};
|
};
|
||||||
|
|
||||||
|
@ -1,7 +1,8 @@
|
|||||||
{ lib, config, ... }:
|
{ lib, config, ... }:
|
||||||
let
|
let
|
||||||
inherit (lib) types;
|
inherit (lib) types;
|
||||||
in {
|
in
|
||||||
|
{
|
||||||
options = {
|
options = {
|
||||||
fun = lib.mkOption {
|
fun = lib.mkOption {
|
||||||
type = types.functionTo (types.listOf types.str);
|
type = types.functionTo (types.listOf types.str);
|
||||||
@ -9,11 +10,13 @@ in {
|
|||||||
|
|
||||||
result = lib.mkOption {
|
result = lib.mkOption {
|
||||||
type = types.str;
|
type = types.str;
|
||||||
default = toString (config.fun {
|
default = toString (
|
||||||
|
config.fun {
|
||||||
a = "a";
|
a = "a";
|
||||||
b = "b";
|
b = "b";
|
||||||
c = "c";
|
c = "c";
|
||||||
});
|
}
|
||||||
|
);
|
||||||
};
|
};
|
||||||
};
|
};
|
||||||
|
|
||||||
|
@ -1,4 +1,9 @@
|
|||||||
{ lib, config, options, ... }:
|
{
|
||||||
|
lib,
|
||||||
|
config,
|
||||||
|
options,
|
||||||
|
...
|
||||||
|
}:
|
||||||
let
|
let
|
||||||
inherit (lib) types;
|
inherit (lib) types;
|
||||||
in
|
in
|
||||||
@ -6,47 +11,51 @@ in
|
|||||||
imports = [
|
imports = [
|
||||||
|
|
||||||
# fun.<function-body>.a
|
# fun.<function-body>.a
|
||||||
({ ... }: {
|
(
|
||||||
|
{ ... }:
|
||||||
|
{
|
||||||
options = {
|
options = {
|
||||||
fun = lib.mkOption {
|
fun = lib.mkOption {
|
||||||
type = types.functionTo (types.submodule {
|
type = types.functionTo (
|
||||||
|
types.submodule {
|
||||||
options.a = lib.mkOption { default = "a"; };
|
options.a = lib.mkOption { default = "a"; };
|
||||||
});
|
}
|
||||||
|
);
|
||||||
};
|
};
|
||||||
};
|
};
|
||||||
})
|
}
|
||||||
|
)
|
||||||
|
|
||||||
# fun.<function-body>.b
|
# fun.<function-body>.b
|
||||||
({ ... }: {
|
(
|
||||||
|
{ ... }:
|
||||||
|
{
|
||||||
options = {
|
options = {
|
||||||
fun = lib.mkOption {
|
fun = lib.mkOption {
|
||||||
type = types.functionTo (types.submodule {
|
type = types.functionTo (
|
||||||
|
types.submodule {
|
||||||
options.b = lib.mkOption { default = "b"; };
|
options.b = lib.mkOption { default = "b"; };
|
||||||
});
|
}
|
||||||
|
);
|
||||||
};
|
};
|
||||||
};
|
};
|
||||||
})
|
}
|
||||||
|
)
|
||||||
];
|
];
|
||||||
|
|
||||||
options = {
|
options = {
|
||||||
result = lib.mkOption
|
result = lib.mkOption {
|
||||||
{
|
|
||||||
type = types.str;
|
type = types.str;
|
||||||
default = lib.concatStringsSep " " (lib.attrValues (config.fun (throw "shouldn't use input param")));
|
default = lib.concatStringsSep " " (
|
||||||
|
lib.attrValues (config.fun (throw "shouldn't use input param"))
|
||||||
|
);
|
||||||
};
|
};
|
||||||
|
|
||||||
optionsResult = lib.mkOption
|
optionsResult = lib.mkOption {
|
||||||
{
|
|
||||||
type = types.str;
|
type = types.str;
|
||||||
default = lib.concatStringsSep " "
|
default = lib.concatStringsSep " " (
|
||||||
(lib.concatLists
|
lib.concatLists (
|
||||||
(lib.mapAttrsToList
|
lib.mapAttrsToList (k: v: if k == "_module" then [ ] else [ (lib.showOption v.loc) ]) (
|
||||||
(k: v:
|
|
||||||
if k == "_module"
|
|
||||||
then [ ]
|
|
||||||
else [ (lib.showOption v.loc) ]
|
|
||||||
)
|
|
||||||
(
|
|
||||||
(options.fun.type.getSubOptions [ "fun" ])
|
(options.fun.type.getSubOptions [ "fun" ])
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
@ -54,8 +63,7 @@ in
|
|||||||
};
|
};
|
||||||
};
|
};
|
||||||
|
|
||||||
config.fun = lib.mkMerge
|
config.fun = lib.mkMerge [
|
||||||
[
|
|
||||||
(input: { b = "bee"; })
|
(input: { b = "bee"; })
|
||||||
];
|
];
|
||||||
}
|
}
|
||||||
|
@ -1,7 +1,8 @@
|
|||||||
{ lib, config, ... }:
|
{ lib, config, ... }:
|
||||||
let
|
let
|
||||||
inherit (lib) types;
|
inherit (lib) types;
|
||||||
in {
|
in
|
||||||
|
{
|
||||||
options = {
|
options = {
|
||||||
fun = lib.mkOption {
|
fun = lib.mkOption {
|
||||||
type = types.functionTo types.str;
|
type = types.functionTo types.str;
|
||||||
|
@ -1,8 +1,8 @@
|
|||||||
|
|
||||||
{ lib, config, ... }:
|
{ lib, config, ... }:
|
||||||
let
|
let
|
||||||
inherit (lib) types;
|
inherit (lib) types;
|
||||||
in {
|
in
|
||||||
|
{
|
||||||
options = {
|
options = {
|
||||||
fun = lib.mkOption {
|
fun = lib.mkOption {
|
||||||
type = types.functionTo types.str;
|
type = types.functionTo types.str;
|
||||||
|
@ -24,7 +24,10 @@
|
|||||||
escapedString = ''
|
escapedString = ''
|
||||||
'\
|
'\
|
||||||
'';
|
'';
|
||||||
tuple = mkTuple [ (mkInt32 1) [ "foo" ] ];
|
tuple = mkTuple [
|
||||||
|
(mkInt32 1)
|
||||||
|
[ "foo" ]
|
||||||
|
];
|
||||||
maybe1 = mkNothing type.string;
|
maybe1 = mkNothing type.string;
|
||||||
maybe2 = mkJust (mkUint32 4);
|
maybe2 = mkJust (mkUint32 4);
|
||||||
variant = mkVariant "foo";
|
variant = mkVariant "foo";
|
||||||
|
@ -1,6 +1,5 @@
|
|||||||
{ lib, custom, ... }:
|
{ lib, custom, ... }:
|
||||||
|
|
||||||
{
|
{
|
||||||
imports = []
|
imports = [ ] ++ lib.optional custom ./define-enable-force.nix;
|
||||||
++ lib.optional custom ./define-enable-force.nix;
|
|
||||||
}
|
}
|
||||||
|
@ -8,4 +8,3 @@
|
|||||||
];
|
];
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -1,4 +1,9 @@
|
|||||||
{ a ? false, lib, ... }: {
|
{
|
||||||
|
a ? false,
|
||||||
|
lib,
|
||||||
|
...
|
||||||
|
}:
|
||||||
|
{
|
||||||
options = {
|
options = {
|
||||||
result = lib.mkOption { };
|
result = lib.mkOption { };
|
||||||
};
|
};
|
||||||
|
@ -1,4 +1,5 @@
|
|||||||
{ config, lib, ... }: {
|
{ config, lib, ... }:
|
||||||
|
{
|
||||||
|
|
||||||
_file = "optionTypeFile.nix";
|
_file = "optionTypeFile.nix";
|
||||||
|
|
||||||
|
@ -1,4 +1,5 @@
|
|||||||
{ config, lib, ... }: {
|
{ config, lib, ... }:
|
||||||
|
{
|
||||||
|
|
||||||
options.theType = lib.mkOption {
|
options.theType = lib.mkOption {
|
||||||
type = lib.types.optionType;
|
type = lib.types.optionType;
|
||||||
|
@ -1,4 +1,5 @@
|
|||||||
{ lib, ... }: {
|
{ lib, ... }:
|
||||||
|
{
|
||||||
options = {
|
options = {
|
||||||
# unlikely mistake, but we can catch any attrset with _type
|
# unlikely mistake, but we can catch any attrset with _type
|
||||||
result = lib.evalModules { modules = [ ]; };
|
result = lib.evalModules { modules = [ ]; };
|
||||||
|
@ -1,4 +1,5 @@
|
|||||||
{ lib, ... }: {
|
{ lib, ... }:
|
||||||
|
{
|
||||||
options = {
|
options = {
|
||||||
result.here = lib.types.str;
|
result.here = lib.types.str;
|
||||||
};
|
};
|
||||||
|
@ -1,4 +1,5 @@
|
|||||||
{ lib, ... }: {
|
{ lib, ... }:
|
||||||
|
{
|
||||||
options = {
|
options = {
|
||||||
result = lib.types.str;
|
result = lib.types.str;
|
||||||
};
|
};
|
||||||
|
@ -1,4 +1,5 @@
|
|||||||
{ lib, config, ... }: {
|
{ lib, config, ... }:
|
||||||
|
{
|
||||||
|
|
||||||
options = {
|
options = {
|
||||||
processedToplevel = lib.mkOption {
|
processedToplevel = lib.mkOption {
|
||||||
|
@ -4,16 +4,22 @@ let
|
|||||||
in
|
in
|
||||||
{
|
{
|
||||||
imports = [
|
imports = [
|
||||||
({ config, ... }: {
|
(
|
||||||
|
{ config, ... }:
|
||||||
|
{
|
||||||
options = {
|
options = {
|
||||||
meta.foo = mkOption {
|
meta.foo = mkOption {
|
||||||
type = types.listOf types.str;
|
type = types.listOf types.str;
|
||||||
};
|
};
|
||||||
result = mkOption { default = lib.concatStringsSep " " config.meta.foo; };
|
result = mkOption { default = lib.concatStringsSep " " config.meta.foo; };
|
||||||
};
|
};
|
||||||
})
|
}
|
||||||
|
)
|
||||||
{
|
{
|
||||||
meta.foo = [ "one" "two" ];
|
meta.foo = [
|
||||||
|
"one"
|
||||||
|
"two"
|
||||||
|
];
|
||||||
}
|
}
|
||||||
];
|
];
|
||||||
}
|
}
|
||||||
|
@ -15,7 +15,10 @@
|
|||||||
};
|
};
|
||||||
};
|
};
|
||||||
};
|
};
|
||||||
config.weird = args@{ ... /* note the lack of a `lib` argument */ }:
|
config.weird =
|
||||||
|
args@{
|
||||||
|
... # note the lack of a `lib` argument
|
||||||
|
}:
|
||||||
assert args.lib == { };
|
assert args.lib == { };
|
||||||
assert args.specialArgs == { lib = { }; };
|
assert args.specialArgs == { lib = { }; };
|
||||||
{
|
{
|
||||||
|
@ -1,14 +1,20 @@
|
|||||||
{ lib, ... }: {
|
{ lib, ... }:
|
||||||
|
{
|
||||||
options.submodule = lib.mkOption {
|
options.submodule = lib.mkOption {
|
||||||
default = { };
|
default = { };
|
||||||
type = lib.types.submoduleWith {
|
type = lib.types.submoduleWith {
|
||||||
modules = [ ({ options, ... }: {
|
modules = [
|
||||||
|
(
|
||||||
|
{ options, ... }:
|
||||||
|
{
|
||||||
options.value = lib.mkOption { };
|
options.value = lib.mkOption { };
|
||||||
|
|
||||||
options.internalFiles = lib.mkOption {
|
options.internalFiles = lib.mkOption {
|
||||||
default = options.value.files;
|
default = options.value.files;
|
||||||
};
|
};
|
||||||
})];
|
}
|
||||||
|
)
|
||||||
|
];
|
||||||
};
|
};
|
||||||
};
|
};
|
||||||
|
|
||||||
|
@ -2,7 +2,9 @@
|
|||||||
|
|
||||||
let
|
let
|
||||||
defs = lib.modules.mergeAttrDefinitionsWithPrio options._module.args;
|
defs = lib.modules.mergeAttrDefinitionsWithPrio options._module.args;
|
||||||
assertLazy = pos: throw "${pos.file}:${toString pos.line}:${toString pos.column}: The test must not evaluate this the assertLazy thunk, but it did. Unexpected strictness leads to unexpected errors and performance problems.";
|
assertLazy =
|
||||||
|
pos:
|
||||||
|
throw "${pos.file}:${toString pos.line}:${toString pos.column}: The test must not evaluate this the assertLazy thunk, but it did. Unexpected strictness leads to unexpected errors and performance problems.";
|
||||||
in
|
in
|
||||||
|
|
||||||
{
|
{
|
||||||
|
@ -1,4 +1,5 @@
|
|||||||
{ lib, ... }: {
|
{ lib, ... }:
|
||||||
|
{
|
||||||
|
|
||||||
options.value = lib.mkOption {
|
options.value = lib.mkOption {
|
||||||
type = lib.types.anything;
|
type = lib.types.anything;
|
||||||
|
@ -1,4 +1,5 @@
|
|||||||
{ lib, ... }: {
|
{ lib, ... }:
|
||||||
|
{
|
||||||
|
|
||||||
options.value = lib.mkOption {
|
options.value = lib.mkOption {
|
||||||
type = lib.types.anything;
|
type = lib.types.anything;
|
||||||
@ -12,7 +13,11 @@
|
|||||||
value.path = ./.;
|
value.path = ./.;
|
||||||
value.null = null;
|
value.null = null;
|
||||||
value.float = 0.1;
|
value.float = 0.1;
|
||||||
value.list = [1 "a" {x=null;}];
|
value.list = [
|
||||||
|
1
|
||||||
|
"a"
|
||||||
|
{ x = null; }
|
||||||
|
];
|
||||||
}
|
}
|
||||||
{
|
{
|
||||||
value.int = 0;
|
value.int = 0;
|
||||||
@ -21,7 +26,11 @@
|
|||||||
value.path = ./.;
|
value.path = ./.;
|
||||||
value.null = null;
|
value.null = null;
|
||||||
value.float = 0.1;
|
value.float = 0.1;
|
||||||
value.list = [1 "a" {x=null;}];
|
value.list = [
|
||||||
|
1
|
||||||
|
"a"
|
||||||
|
{ x = null; }
|
||||||
|
];
|
||||||
}
|
}
|
||||||
];
|
];
|
||||||
|
|
||||||
|
@ -1,4 +1,5 @@
|
|||||||
{ lib, config, ... }: {
|
{ lib, config, ... }:
|
||||||
|
{
|
||||||
|
|
||||||
options.valueIsFunction = lib.mkOption {
|
options.valueIsFunction = lib.mkOption {
|
||||||
default = lib.mapAttrs (name: lib.isFunction) config.value;
|
default = lib.mapAttrs (name: lib.isFunction) config.value;
|
||||||
|
@ -1,4 +1,5 @@
|
|||||||
{ lib, ... }: {
|
{ lib, ... }:
|
||||||
|
{
|
||||||
|
|
||||||
options.value = lib.mkOption {
|
options.value = lib.mkOption {
|
||||||
type = lib.types.anything;
|
type = lib.types.anything;
|
||||||
|
@ -1,4 +1,5 @@
|
|||||||
{ lib, ... }: {
|
{ lib, ... }:
|
||||||
|
{
|
||||||
|
|
||||||
options.value = lib.mkOption {
|
options.value = lib.mkOption {
|
||||||
type = lib.types.anything;
|
type = lib.types.anything;
|
||||||
|
@ -1,4 +1,5 @@
|
|||||||
{ lib, ... }: {
|
{ lib, ... }:
|
||||||
|
{
|
||||||
|
|
||||||
options.value = lib.mkOption {
|
options.value = lib.mkOption {
|
||||||
type = lib.types.anything;
|
type = lib.types.anything;
|
||||||
|
@ -8,7 +8,9 @@ in
|
|||||||
type = types.attrTag {
|
type = types.attrTag {
|
||||||
int = types.int;
|
int = types.int;
|
||||||
};
|
};
|
||||||
default = { int = 1; };
|
default = {
|
||||||
|
int = 1;
|
||||||
|
};
|
||||||
};
|
};
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
@ -1,4 +1,9 @@
|
|||||||
{ lib, config, options, ... }:
|
{
|
||||||
|
lib,
|
||||||
|
config,
|
||||||
|
options,
|
||||||
|
...
|
||||||
|
}:
|
||||||
let
|
let
|
||||||
inherit (lib) mkOption types;
|
inherit (lib) mkOption types;
|
||||||
forceDeep = x: builtins.deepSeq x x;
|
forceDeep = x: builtins.deepSeq x x;
|
||||||
@ -7,15 +12,16 @@ in
|
|||||||
{
|
{
|
||||||
options = {
|
options = {
|
||||||
intStrings = mkOption {
|
intStrings = mkOption {
|
||||||
type = types.attrsOf
|
type = types.attrsOf (
|
||||||
(types.attrTag {
|
types.attrTag {
|
||||||
left = mkOption {
|
left = mkOption {
|
||||||
type = types.int;
|
type = types.int;
|
||||||
};
|
};
|
||||||
right = mkOption {
|
right = mkOption {
|
||||||
type = types.str;
|
type = types.str;
|
||||||
};
|
};
|
||||||
});
|
}
|
||||||
|
);
|
||||||
};
|
};
|
||||||
nested = mkOption {
|
nested = mkOption {
|
||||||
type = types.attrTag {
|
type = types.attrTag {
|
||||||
@ -87,9 +93,18 @@ in
|
|||||||
config = {
|
config = {
|
||||||
intStrings.syntaxError = 1;
|
intStrings.syntaxError = 1;
|
||||||
intStrings.syntaxError2 = { };
|
intStrings.syntaxError2 = { };
|
||||||
intStrings.syntaxError3 = { a = true; b = true; };
|
intStrings.syntaxError3 = {
|
||||||
intStrings.syntaxError4 = lib.mkMerge [ { a = true; } { b = true; } ];
|
a = true;
|
||||||
intStrings.mergeError = lib.mkMerge [ { int = throw "do not eval"; } { string = throw "do not eval"; } ];
|
b = true;
|
||||||
|
};
|
||||||
|
intStrings.syntaxError4 = lib.mkMerge [
|
||||||
|
{ a = true; }
|
||||||
|
{ b = true; }
|
||||||
|
];
|
||||||
|
intStrings.mergeError = lib.mkMerge [
|
||||||
|
{ int = throw "do not eval"; }
|
||||||
|
{ string = throw "do not eval"; }
|
||||||
|
];
|
||||||
intStrings.badTagError.rite = throw "do not eval";
|
intStrings.badTagError.rite = throw "do not eval";
|
||||||
intStrings.badTagTypeError.left = "bad";
|
intStrings.badTagTypeError.left = "bad";
|
||||||
intStrings.numberOne.left = 1;
|
intStrings.numberOne.left = 1;
|
||||||
@ -109,7 +124,12 @@ in
|
|||||||
assert config.docs."submodules.<name>.foo.bar".type == "signed integer";
|
assert config.docs."submodules.<name>.foo.bar".type == "signed integer";
|
||||||
assert config.docs."submodules.<name>.qux".type == "string";
|
assert config.docs."submodules.<name>.qux".type == "string";
|
||||||
assert config.docs."submodules.<name>.qux".declarations == [ __curPos.file ];
|
assert config.docs."submodules.<name>.qux".declarations == [ __curPos.file ];
|
||||||
assert config.docs."submodules.<name>.qux".loc == [ "submodules" "<name>" "qux" ];
|
assert
|
||||||
|
config.docs."submodules.<name>.qux".loc == [
|
||||||
|
"submodules"
|
||||||
|
"<name>"
|
||||||
|
"qux"
|
||||||
|
];
|
||||||
assert config.docs."submodules.<name>.qux".name == "submodules.<name>.qux";
|
assert config.docs."submodules.<name>.qux".name == "submodules.<name>.qux";
|
||||||
assert config.docs."submodules.<name>.qux".description == "A qux for when you don't want a foo";
|
assert config.docs."submodules.<name>.qux".description == "A qux for when you don't want a foo";
|
||||||
assert config.docs."submodules.<name>.qux".readOnly == false;
|
assert config.docs."submodules.<name>.qux".readOnly == false;
|
||||||
@ -119,17 +139,30 @@ in
|
|||||||
assert options.submodules.declarations == [ __curPos.file ];
|
assert options.submodules.declarations == [ __curPos.file ];
|
||||||
assert lib.length options.submodules.declarationPositions == 1;
|
assert lib.length options.submodules.declarationPositions == 1;
|
||||||
assert (lib.head options.submodules.declarationPositions).file == __curPos.file;
|
assert (lib.head options.submodules.declarationPositions).file == __curPos.file;
|
||||||
assert options.merged.declarations == [ __curPos.file __curPos.file ];
|
assert
|
||||||
|
options.merged.declarations == [
|
||||||
|
__curPos.file
|
||||||
|
__curPos.file
|
||||||
|
];
|
||||||
assert lib.length options.merged.declarationPositions == 2;
|
assert lib.length options.merged.declarationPositions == 2;
|
||||||
assert (lib.elemAt options.merged.declarationPositions 0).file == __curPos.file;
|
assert (lib.elemAt options.merged.declarationPositions 0).file == __curPos.file;
|
||||||
assert (lib.elemAt options.merged.declarationPositions 1).file == __curPos.file;
|
assert (lib.elemAt options.merged.declarationPositions 1).file == __curPos.file;
|
||||||
assert (lib.elemAt options.merged.declarationPositions 0).line != (lib.elemAt options.merged.declarationPositions 1).line;
|
assert
|
||||||
assert mergedSubOption.declarations == [ __curPos.file __curPos.file ];
|
(lib.elemAt options.merged.declarationPositions 0).line
|
||||||
|
!= (lib.elemAt options.merged.declarationPositions 1).line;
|
||||||
|
assert
|
||||||
|
mergedSubOption.declarations == [
|
||||||
|
__curPos.file
|
||||||
|
__curPos.file
|
||||||
|
];
|
||||||
assert lib.length mergedSubOption.declarationPositions == 2;
|
assert lib.length mergedSubOption.declarationPositions == 2;
|
||||||
assert (lib.elemAt mergedSubOption.declarationPositions 0).file == __curPos.file;
|
assert (lib.elemAt mergedSubOption.declarationPositions 0).file == __curPos.file;
|
||||||
assert (lib.elemAt mergedSubOption.declarationPositions 1).file == __curPos.file;
|
assert (lib.elemAt mergedSubOption.declarationPositions 1).file == __curPos.file;
|
||||||
assert (lib.elemAt mergedSubOption.declarationPositions 0).line != (lib.elemAt mergedSubOption.declarationPositions 1).line;
|
assert
|
||||||
|
(lib.elemAt mergedSubOption.declarationPositions 0).line
|
||||||
|
!= (lib.elemAt mergedSubOption.declarationPositions 1).line;
|
||||||
assert lib.length config.docs."merged.<name>.extensible".declarations == 2;
|
assert lib.length config.docs."merged.<name>.extensible".declarations == 2;
|
||||||
true);
|
true
|
||||||
|
);
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
@ -4,13 +4,18 @@ let
|
|||||||
in
|
in
|
||||||
{
|
{
|
||||||
options.examples = mkOption {
|
options.examples = mkOption {
|
||||||
type = types.lazyAttrsOf
|
type = types.lazyAttrsOf (
|
||||||
(types.unique
|
types.unique {
|
||||||
{ message = "We require a single definition, because seeing the whole value at once helps us maintain critical invariants of our system."; }
|
message = "We require a single definition, because seeing the whole value at once helps us maintain critical invariants of our system.";
|
||||||
(types.attrsOf types.str));
|
} (types.attrsOf types.str)
|
||||||
|
);
|
||||||
};
|
};
|
||||||
imports = [
|
imports = [
|
||||||
{ examples.merged = { b = "bee"; }; }
|
{
|
||||||
|
examples.merged = {
|
||||||
|
b = "bee";
|
||||||
|
};
|
||||||
|
}
|
||||||
{ examples.override = lib.mkForce { b = "bee"; }; }
|
{ examples.override = lib.mkForce { b = "bee"; }; }
|
||||||
];
|
];
|
||||||
config.examples = {
|
config.examples = {
|
Some files were not shown because too many files have changed in this diff.