diff --git a/.github/labeler.yml b/.github/labeler.yml index e036eb3c8448..0e6fd3e2677b 100644 --- a/.github/labeler.yml +++ b/.github/labeler.yml @@ -13,7 +13,7 @@ "documentation": - changed-files: - - any-glob-to-any-file: "doc/manual/*" + - any-glob-to-any-file: "doc/manual/**/*" - any-glob-to-any-file: "src/nix/**/*.md" "store": @@ -40,4 +40,4 @@ - any-glob-to-any-file: "src/*/tests/**/*" # Functional and integration tests - any-glob-to-any-file: "tests/functional/**/*" - + diff --git a/.gitignore b/.gitignore index 6996ca484aa4..52aaec23fc28 100644 --- a/.gitignore +++ b/.gitignore @@ -154,6 +154,8 @@ result-* .vscode/ .idea/ +.pre-commit-config.yaml + # clangd and possibly more .cache/ diff --git a/.version b/.version index f48f82fa2c47..e9763f6bfed6 100644 --- a/.version +++ b/.version @@ -1 +1 @@ -2.22.0 +2.23.0 diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 887bd480273e..38f5d43b7108 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -27,6 +27,8 @@ Check out the [security policy](https://github.com/NixOS/nix/security/policy). 1. Search for related issues that cover what you're going to work on. It could help to mention there that you will work on the issue. + We strongly recommend first-time contributors not to propose new features but rather fix tightly-scoped problems in order to build trust and a working relationship with maintainers. + Issues labeled [good first issue](https://github.com/NixOS/nix/labels/good%20first%20issue) should be relatively easy to fix and are likely to get merged quickly. Pull requests addressing issues labeled [idea approved](https://github.com/NixOS/nix/labels/idea%20approved) or [RFC](https://github.com/NixOS/nix/labels/RFC) are especially welcomed by maintainers and will receive prioritised review. diff --git a/Makefile b/Makefile index ba5a6cd92f87..ea0754fa5a28 100644 --- a/Makefile +++ b/Makefile @@ -27,6 +27,7 @@ makefiles = \ ifdef HOST_UNIX makefiles += \ scripts/local.mk \ + maintainers/local.mk \ misc/bash/local.mk \ misc/fish/local.mk \ misc/zsh/local.mk \ diff --git a/configure.ac b/configure.ac index 1d327d51dfb3..b2a5794b5034 100644 --- a/configure.ac +++ b/configure.ac @@ -63,7 +63,6 @@ AC_SYS_LARGEFILE # Solaris-specific stuff. -AC_STRUCT_DIRENT_D_TYPE case "$host_os" in solaris*) # Solaris requires -lsocket -lnsl for network functions @@ -317,6 +316,17 @@ case "$host_os" in [CXXFLAGS="$LIBSECCOMP_CFLAGS $CXXFLAGS"]) have_seccomp=1 AC_DEFINE([HAVE_SECCOMP], [1], [Whether seccomp is available and should be used for sandboxing.]) + AC_COMPILE_IFELSE([ + AC_LANG_SOURCE([[ + #include + #ifndef __SNR_fchmodat2 + # error "Missing support for fchmodat2" + #endif + ]]) + ], [], [ + echo "libseccomp is missing __SNR_fchmodat2. 
Please provide libseccomp 2.5.5 or later" + exit 1 + ]) else have_seccomp= fi diff --git a/doc/external-api/local.mk b/doc/external-api/local.mk index c739bdaf0efb..ae2b44db8f02 100644 --- a/doc/external-api/local.mk +++ b/doc/external-api/local.mk @@ -1,4 +1,4 @@ -$(docdir)/external-api/html/index.html $(docdir)/external-api/latex: $(d)/doxygen.cfg +$(docdir)/external-api/html/index.html $(docdir)/external-api/latex: $(d)/doxygen.cfg src/lib*-c/*.h mkdir -p $(docdir)/external-api { cat $< ; echo "OUTPUT_DIRECTORY=$(docdir)/external-api" ; } | doxygen - diff --git a/doc/internal-api/local.mk b/doc/internal-api/local.mk index bf2c4dedea13..be9b7bb55f48 100644 --- a/doc/internal-api/local.mk +++ b/doc/internal-api/local.mk @@ -1,4 +1,4 @@ -$(docdir)/internal-api/html/index.html $(docdir)/internal-api/latex: $(d)/doxygen.cfg +$(docdir)/internal-api/html/index.html $(docdir)/internal-api/latex: $(d)/doxygen.cfg src/**/*.hh mkdir -p $(docdir)/internal-api { cat $< ; echo "OUTPUT_DIRECTORY=$(docdir)/internal-api" ; } | doxygen - diff --git a/doc/manual/book.toml b/doc/manual/book.toml index d524dbb1309e..73fb7e75e24a 100644 --- a/doc/manual/book.toml +++ b/doc/manual/book.toml @@ -6,8 +6,6 @@ additional-css = ["custom.css"] additional-js = ["redirects.js"] edit-url-template = "https://github.com/NixOS/nix/tree/master/doc/manual/{path}" git-repository-url = "https://github.com/NixOS/nix" -fold.enable = true -fold.level = 1 [preprocessor.anchors] renderers = ["html"] diff --git a/doc/manual/redirects.js b/doc/manual/redirects.js index 25648969d1cd..ec5645ea7c25 100644 --- a/doc/manual/redirects.js +++ b/doc/manual/redirects.js @@ -290,10 +290,10 @@ const redirects = { "ssec-gc-roots": "package-management/garbage-collector-roots.html", "chap-package-management": "package-management/index.html", "sec-profiles": "package-management/profiles.html", - "ssec-s3-substituter": "package-management/s3-substituter.html", - "ssec-s3-substituter-anonymous-reads": "package-management/s3-substituter.html#anonymous-reads-to-your-s3-compatible-binary-cache", - "ssec-s3-substituter-authenticated-reads": "package-management/s3-substituter.html#authenticated-reads-to-your-s3-binary-cache", - "ssec-s3-substituter-authenticated-writes": "package-management/s3-substituter.html#authenticated-writes-to-your-s3-compatible-binary-cache", + "ssec-s3-substituter": "store/types/s3-substituter.html", + "ssec-s3-substituter-anonymous-reads": "store/types/s3-substituter.html#anonymous-reads-to-your-s3-compatible-binary-cache", + "ssec-s3-substituter-authenticated-reads": "store/types/s3-substituter.html#authenticated-reads-to-your-s3-binary-cache", + "ssec-s3-substituter-authenticated-writes": "store/types/s3-substituter.html#authenticated-writes-to-your-s3-compatible-binary-cache", "sec-sharing-packages": "package-management/sharing-packages.html", "ssec-ssh-substituter": "package-management/ssh-substituter.html", "chap-quick-start": "quick-start.html", diff --git a/doc/manual/rl-next/nix-eval-derivations.md b/doc/manual/rl-next/nix-eval-derivations.md deleted file mode 100644 index ed0a7338464d..000000000000 --- a/doc/manual/rl-next/nix-eval-derivations.md +++ /dev/null @@ -1,13 +0,0 @@ ---- -synopsis: "`nix eval` prints derivations as `.drv` paths" -prs: 10200 ---- - -`nix eval` will now print derivations as their `.drv` paths, rather than as -attribute sets. 
This makes commands like `nix eval nixpkgs#bash` terminate -instead of infinitely looping into recursive self-referential attributes: - -```ShellSession -$ nix eval nixpkgs#bash -«derivation /nix/store/m32cbgbd598f4w299g0hwyv7gbw6rqcg-bash-5.2p26.drv» -``` diff --git a/doc/manual/rl-next/remove-repl-flake.md b/doc/manual/rl-next/remove-repl-flake.md deleted file mode 100644 index 23298e2edea2..000000000000 --- a/doc/manual/rl-next/remove-repl-flake.md +++ /dev/null @@ -1,8 +0,0 @@ ---- -synopsis: Remove experimental repl-flake -significance: significant -issues: 10103 -prs: 10299 ---- - -The `repl-flake` experimental feature has been removed. The `nix repl` command now works like the rest of the new CLI in that `nix repl {path}` now tries to load a flake at `{path}` (or fails if the `flakes` experimental feature isn't enabled).* diff --git a/doc/manual/src/SUMMARY.md.in b/doc/manual/src/SUMMARY.md.in index d9044fbdac68..7f0fb2e9d938 100644 --- a/doc/manual/src/SUMMARY.md.in +++ b/doc/manual/src/SUMMARY.md.in @@ -27,6 +27,7 @@ - [Language Constructs](language/constructs.md) - [String interpolation](language/string-interpolation.md) - [Lookup path](language/constructs/lookup-path.md) + - [String context](language/string-context.md) - [Operators](language/operators.md) - [Derivations](language/derivations.md) - [Advanced Attributes](language/advanced-attributes.md) @@ -42,7 +43,6 @@ - [Serving a Nix store via HTTP](package-management/binary-cache-substituter.md) - [Copying Closures via SSH](package-management/copy-closure.md) - [Serving a Nix store via SSH](package-management/ssh-substituter.md) - - [Serving a Nix store via S3](package-management/s3-substituter.md) - [Remote Builds](advanced-topics/distributed-builds.md) - [Tuning Cores and Jobs](advanced-topics/cores-vs-jobs.md) - [Verifying Build Reproducibility](advanced-topics/diff-hook.md) @@ -112,6 +112,7 @@ - [Store Path Specification](protocols/store-path.md) - [Nix Archive (NAR) Format](protocols/nix-archive.md) - [Derivation "ATerm" file format](protocols/derivation-aterm.md) +- [C API](c-api.md) - [Glossary](glossary.md) - [Contributing](contributing/index.md) - [Hacking](contributing/hacking.md) @@ -122,6 +123,7 @@ - [C++ style guide](contributing/cxx.md) - [Releases](release-notes/index.md) {{#include ./SUMMARY-rl-next.md}} + - [Release 2.22 (2024-04-23)](release-notes/rl-2.22.md) - [Release 2.21 (2024-03-11)](release-notes/rl-2.21.md) - [Release 2.20 (2024-01-29)](release-notes/rl-2.20.md) - [Release 2.19 (2023-11-17)](release-notes/rl-2.19.md) diff --git a/doc/manual/src/advanced-topics/distributed-builds.md b/doc/manual/src/advanced-topics/distributed-builds.md index 52acd039c105..ddabaeb4df14 100644 --- a/doc/manual/src/advanced-topics/distributed-builds.md +++ b/doc/manual/src/advanced-topics/distributed-builds.md @@ -12,14 +12,14 @@ machine is accessible via SSH and that it has Nix installed. You can test whether connecting to the remote Nix instance works, e.g. ```console -$ nix store info --store ssh://mac +$ nix store ping --store ssh://mac ``` will try to connect to the machine named `mac`. It is possible to specify an SSH identity file as part of the remote store URI, e.g. 
```console -$ nix store info --store ssh://mac?ssh-key=/home/alice/my-key +$ nix store ping --store ssh://mac?ssh-key=/home/alice/my-key ``` Since builds should be non-interactive, the key should not have a diff --git a/doc/manual/src/c-api.md b/doc/manual/src/c-api.md new file mode 100644 index 000000000000..29df0b644195 --- /dev/null +++ b/doc/manual/src/c-api.md @@ -0,0 +1,16 @@ +# C API + +Nix provides a C API with the intent of [_becoming_](https://github.com/NixOS/nix/milestone/52) a stable API, which it is currently not. +It is in development. + +See: +- C API documentation for a recent build of master + - [Getting Started] + - [Index] +- [Matrix Room *Nix Bindings*](https://matrix.to/#/#nix-bindings:nixos.org) for discussion and questions. +- [Stabilisation Milestone](https://github.com/NixOS/nix/milestone/52) +- [Other C API PRs and issues](https://github.com/NixOS/nix/labels/c%20api) +- [Contributing C API Documentation](contributing/documentation.md#c-api-documentation), including how to build it locally. + +[Getting Started]: https://hydra.nixos.org/job/nix/master/external-api-docs/latest/download-by-type/doc/external-api-docs +[Index]: https://hydra.nixos.org/job/nix/master/external-api-docs/latest/download-by-type/doc/external-api-docs/globals.html diff --git a/doc/manual/src/command-ref/conf-file-prefix.md b/doc/manual/src/command-ref/conf-file-prefix.md index 1e4085977542..627806cfbda8 100644 --- a/doc/manual/src/command-ref/conf-file-prefix.md +++ b/doc/manual/src/command-ref/conf-file-prefix.md @@ -66,5 +66,12 @@ Configuration options can be set on the command line, overriding the values set The `extra-` prefix is supported for settings that take a list of items (e.g. `--extra-trusted users alice` or `--option extra-trusted-users alice`). +## Integer settings + +Settings that have an integer type support the suffixes `K`, `M`, `G` +and `T`. These cause the specified value to be multiplied by 2^10, +2^20, 2^30 and 2^40, respectively. For instance, `--min-free 1M` is +equivalent to `--min-free 1048576`. + # Available settings diff --git a/doc/manual/src/command-ref/nix-copy-closure.md b/doc/manual/src/command-ref/nix-copy-closure.md index eb1693e1e830..46d381f5d628 100644 --- a/doc/manual/src/command-ref/nix-copy-closure.md +++ b/doc/manual/src/command-ref/nix-copy-closure.md @@ -78,14 +78,14 @@ authentication, you can avoid typing the passphrase with `ssh-agent`. 
Copy Firefox with all its dependencies to a remote machine: ```console -$ nix-copy-closure --to alice@itchy.labs $(type -tP firefox) +$ nix-copy-closure --to alice@itchy.example.org $(type -P firefox) ``` Copy Subversion from a remote machine and then install it into a user environment: ```console -$ nix-copy-closure --from alice@itchy.labs \ +$ nix-copy-closure --from alice@itchy.example.org \ /nix/store/0dj0503hjxy5mbwlafv1rsbdiyx1gkdy-subversion-1.4.4 $ nix-env --install /nix/store/0dj0503hjxy5mbwlafv1rsbdiyx1gkdy-subversion-1.4.4 ``` diff --git a/doc/manual/src/command-ref/nix-env.md b/doc/manual/src/command-ref/nix-env.md index 941723216096..c6f627365a51 100644 --- a/doc/manual/src/command-ref/nix-env.md +++ b/doc/manual/src/command-ref/nix-env.md @@ -47,39 +47,83 @@ These pages can be viewed offline: Example: `nix-env --help --install` +# Package sources + +`nix-env` can obtain packages from multiple sources: + +- An attribute set of derivations from: + - The [default Nix expression](@docroot@/command-ref/files/default-nix-expression.md) (by default) + - A Nix file, specified via `--file` + - A [profile](@docroot@/command-ref/files/profiles.md), specified via `--from-profile` + - A Nix expression that is a function which takes default expression as argument, specified via `--from-expression` +- A [store path](@docroot@/store/store-path.md) + # Selectors -Several commands, such as `nix-env --query ` and `nix-env --install `, take a list of -arguments that specify the packages on which to operate. These are -extended regular expressions that must match the entire name of the -package. (For details on regular expressions, see **regex**(7).) The match is -case-sensitive. The regular expression can optionally be followed by a -dash and a version number; if omitted, any version of the package will -match. Here are some examples: - - - `firefox`\ - Matches the package name `firefox` and any version. - - - `firefox-32.0`\ - Matches the package name `firefox` and version `32.0`. - - - `gtk\\+`\ - Matches the package name `gtk+`. The `+` character must be escaped - using a backslash to prevent it from being interpreted as a - quantifier, and the backslash must be escaped in turn with another - backslash to ensure that the shell passes it on. - - - `.\*`\ - Matches any package name. This is the default for most commands. - - - `'.*zip.*'`\ - Matches any package name containing the string `zip`. Note the dots: - `'*zip*'` does not work, because in a regular expression, the - character `*` is interpreted as a quantifier. - - - `'.*(firefox|chromium).*'`\ - Matches any package name containing the strings `firefox` or - `chromium`. +Several operations, such as [`nix-env --query`](./nix-env/query.md) and [`nix-env --install`](./nix-env/install.md), take a list of *arguments* that specify the packages on which to operate. + +Packages are identified based on a `name` part and a `version` part of a [symbolic derivation name](@docroot@/language/derivations.md#attr-names): + +- `name`: Everything up to but not including the first dash (`-`) that is *not* followed by a letter. +- `version`: The rest, excluding the separating dash. 
+ +> **Example** +> +> `nix-env` parses the symbolic derivation name `apache-httpd-2.0.48` as: +> +> ```json +> { +> "name": "apache-httpd", +> "version": "2.0.48" +> } +> ``` + +> **Example** +> +> `nix-env` parses the symbolic derivation name `firefox.*` as: +> +> ```json +> { +> "name": "firefox.*", +> "version": "" +> } +> ``` + +The `name` parts of the *arguments* to `nix-env` are treated as extended regular expressions and matched against the `name` parts of derivation names in the package source. +The match is case-sensitive. +The regular expression can optionally be followed by a dash (`-`) and a version number; if omitted, any version of the package will match. +For details on regular expressions, see [**regex**(7)](https://linux.die.net/man/7/regex). + +> **Example** +> +> Common patterns for finding package names with `nix-env`: +> +> - `firefox` +> +> Matches the package name `firefox` and any version. +> +> - `firefox-32.0` +> +> Matches the package name `firefox` and version `32.0`. +> +> - `gtk\\+` +> +> Matches the package name `gtk+`. +> The `+` character must be escaped using a backslash (`\`) to prevent it from being interpreted as a quantifier, and the backslash must be escaped in turn with another backslash to ensure that the shell passes it on. +> +> - `.\*` +> +> Matches any package name. +> This is the default for most commands. +> +> - `'.*zip.*'` +> +> Matches any package name containing the string `zip`. +> Note the dots: `'*zip*'` does not work, because in a regular expression, the character `*` is interpreted as a quantifier. +> +> - `'.*(firefox|chromium).*'` +> +> Matches any package name containing the strings `firefox` or `chromium`. # Files diff --git a/doc/manual/src/command-ref/nix-env/install.md b/doc/manual/src/command-ref/nix-env/install.md index c1fff50e80f7..a2cc7f8620c9 100644 --- a/doc/manual/src/command-ref/nix-env/install.md +++ b/doc/manual/src/command-ref/nix-env/install.md @@ -14,14 +14,13 @@ # Description -The install operation creates a new user environment. +The `--install` operation creates a new user environment. It is based on the current generation of the active [profile](@docroot@/command-ref/files/profiles.md), to which a set of [store paths] described by *args* is added. [store paths]: @docroot@/glossary.md#gloss-store-path The arguments *args* map to store paths in a number of possible ways: - - By default, *args* is a set of [derivation] names denoting derivations in the [default Nix expression]. These are [realised], and the resulting output paths are installed. Currently installed derivations with a name equal to the name of a derivation being added are removed unless the option `--preserve-installed` is specified. @@ -50,7 +49,7 @@ The arguments *args* map to store paths in a number of possible ways: Show the attribute paths of available packages with [`nix-env --query`](./query.md): ```console - nix-env --query --available --attr-path` + nix-env --query --available --attr-path ``` - If `--from-profile` *path* is given, *args* is a set of names diff --git a/doc/manual/src/command-ref/nix-store/import.md b/doc/manual/src/command-ref/nix-store/import.md index 2711316a7d63..0ecde177c907 100644 --- a/doc/manual/src/command-ref/nix-store/import.md +++ b/doc/manual/src/command-ref/nix-store/import.md @@ -8,11 +8,13 @@ # Description -The operation `--import` reads a serialisation of a set of store paths -produced by `nix-store --export` from standard input and adds those -store paths to the Nix store. 
Paths that already exist in the Nix store -are ignored. If a path refers to another path that doesn’t exist in the -Nix store, the import fails. +The operation `--import` reads a serialisation of a set of [store objects](@docroot@/glossary.md#gloss-store-object) produced by [`nix-store --export`](./export.md) from standard input, and adds those store objects to the specified [Nix store](@docroot@/store/index.md). +Paths that already exist in the target Nix store are ignored. +If a path [refers](@docroot@/glossary.md#gloss-reference) to another path that doesn’t exist in the target Nix store, the import fails. + +> **Note** +> +> For efficient transfer of closures to remote machines over SSH, use [`nix-copy-closure`](@docroot@/command-ref/nix-copy-closure.md). {{#include ./opt-common.md}} diff --git a/doc/manual/src/contributing/documentation.md b/doc/manual/src/contributing/documentation.md index e7f94ab8c353..359fdb5569a4 100644 --- a/doc/manual/src/contributing/documentation.md +++ b/doc/manual/src/contributing/documentation.md @@ -207,8 +207,9 @@ or inside `nix-shell` or `nix develop`: # xdg-open ./outputs/doc/share/doc/nix/internal-api/html/index.html ``` -## C API documentation (experimental) +## C API documentation +Note that the C API is not yet stable. [C API documentation] is available online. You can also build and view it yourself: diff --git a/doc/manual/src/contributing/hacking.md b/doc/manual/src/contributing/hacking.md index c43149c4d96a..08ba84faa532 100644 --- a/doc/manual/src/contributing/hacking.md +++ b/doc/manual/src/contributing/hacking.md @@ -273,6 +273,29 @@ Configure your editor to use the `clangd` from the `.#native-clangStdenvPackages > Some other editors (e.g. Emacs, Vim) need a plugin to support LSP servers in general (e.g. [lsp-mode](https://github.com/emacs-lsp/lsp-mode) for Emacs and [vim-lsp](https://github.com/prabirshrestha/vim-lsp) for vim). > Editor-specific setup is typically opinionated, so we will not cover it here in more detail. +## Formatting and pre-commit hooks + +You may run the formatters as a one-off using: + +```console +make format +``` + +If you'd like to run the formatters before every commit, install the hooks: + +``` +pre-commit-hooks-install +``` + +This installs [pre-commit](https://pre-commit.com) using [cachix/git-hooks.nix](https://github.com/cachix/git-hooks.nix). + +When making a commit, pay attention to the console output. +If it fails, run `git add --patch` to approve the suggestions _and commit again_. + +To refresh pre-commit hook's config file, do the following: +1. Exit the development shell and start it again by running `nix develop`. +2. If you also use the pre-commit hook, also run `pre-commit-hooks-install` again. + ## Add a release note `doc/manual/rl-next` contains release notes entries for all unreleased changes. diff --git a/doc/manual/src/contributing/testing.md b/doc/manual/src/contributing/testing.md index 31c39c16c095..607914ba3e8d 100644 --- a/doc/manual/src/contributing/testing.md +++ b/doc/manual/src/contributing/testing.md @@ -162,14 +162,14 @@ ran test tests/functional/${testName}.sh... [PASS] or without `make`: ```shell-session -$ ./mk/run-test.sh tests/functional/${testName}.sh tests/functional/init.sh +$ ./mk/run-test.sh tests/functional/${testName}.sh ran test tests/functional/${testName}.sh... 
[PASS]
 ```

To see the complete output, one can also run:

 ```shell-session
-$ ./mk/debug-test.sh tests/functional/${testName}.sh tests/functional/init.sh
+$ ./mk/debug-test.sh tests/functional/${testName}.sh
 +(${testName}.sh:1) foo
 output from foo
 +(${testName}.sh:2) bar
@@ -204,7 +204,7 @@ edit it like so:
 Then, running the test with `./mk/debug-test.sh` will drop you into GDB once the script reaches that point:

 ```shell-session
-$ ./mk/debug-test.sh tests/functional/${testName}.sh tests/functional/init.sh
+$ ./mk/debug-test.sh tests/functional/${testName}.sh
 ...
 + gdb blash blub
 GNU gdb (GDB) 12.1
diff --git a/doc/manual/src/glossary.md b/doc/manual/src/glossary.md
index 66e4628c0dd1..cbffda187eba 100644
--- a/doc/manual/src/glossary.md
+++ b/doc/manual/src/glossary.md
@@ -218,6 +218,17 @@
 - [output closure]{#gloss-output-closure}\
   The [closure] of an [output path]. It only contains what is [reachable] from the output.

+- [deriving path]{#gloss-deriving-path}
+
+  Deriving paths are a way to refer to [store objects][store object] that are not yet [realised][realise].
+  This is necessary because, in general and particularly for [content-addressed derivations][content-addressed derivation], the [output path] of an [output] is not known in advance.
+  There are two forms:
+
+  - *constant*: just a [store path]
+    It can be made [valid][validity] by copying it into the store: from the evaluator, command line interface or another store.
+
+  - *output*: a pair of a [store path] to a [derivation] and an [output] name.
+
 - [deriver]{#gloss-deriver}

   The [store derivation] that produced an [output path].
@@ -295,6 +306,25 @@
   [path]: ./language/values.md#type-path
   [attribute name]: ./language/values.md#attribute-set

+- [base directory]{#gloss-base-directory}
+
+  The location from which relative paths are resolved.
+
+  - For expressions in a file, the base directory is the directory containing that file.
+    This is analogous to the directory of a [base URL](https://datatracker.ietf.org/doc/html/rfc1808#section-3.3).
+
+
+
+  - For expressions written in command line arguments with [`--expr`](@docroot@/command-ref/opt-common.html#opt-expr), the base directory is the current working directory.
+
+  [base directory]: #gloss-base-directory
+
 - [experimental feature]{#gloss-experimental-feature}

   Not yet stabilized functionality guarded by named experimental feature flags.
diff --git a/doc/manual/src/installation/env-variables.md b/doc/manual/src/installation/env-variables.md
index db98f52ff6e4..0350904211ac 100644
--- a/doc/manual/src/installation/env-variables.md
+++ b/doc/manual/src/installation/env-variables.md
@@ -53,7 +53,8 @@ ssl-cert-file = /etc/ssl/my-certificate-bundle.crt
 The Nix installer has special handling for these proxy-related environment
 variables: `http_proxy`, `https_proxy`, `ftp_proxy`,
-`no_proxy`, `HTTP_PROXY`, `HTTPS_PROXY`, `FTP_PROXY`, `NO_PROXY`.
+`all_proxy`, `no_proxy`, `HTTP_PROXY`, `HTTPS_PROXY`, `FTP_PROXY`,
+`ALL_PROXY`, `NO_PROXY`.
If any of these variables are set when running the Nix installer, then the installer will create an override file at diff --git a/doc/manual/src/installation/installing-binary.md b/doc/manual/src/installation/installing-binary.md index 0dc9891598af..385008d8c161 100644 --- a/doc/manual/src/installation/installing-binary.md +++ b/doc/manual/src/installation/installing-binary.md @@ -50,7 +50,7 @@ Supported systems: To explicitly instruct the installer to perform a multi-user installation on your system: ```console -$ curl -L https://nixos.org/nix/install | sh -s -- --daemon +$ bash <(curl -L https://nixos.org/nix/install) --daemon ``` You can run this under your usual user account or `root`. @@ -61,7 +61,7 @@ The script will invoke `sudo` as needed. To explicitly select a single-user installation on your system: ```console -$ curl -L https://nixos.org/nix/install | sh -s -- --no-daemon +$ bash <(curl -L https://nixos.org/nix/install) --no-daemon ``` In a single-user installation, `/nix` is owned by the invoking user. diff --git a/doc/manual/src/installation/uninstall.md b/doc/manual/src/installation/uninstall.md index 9ead5e53c5c9..590327fea1bc 100644 --- a/doc/manual/src/installation/uninstall.md +++ b/doc/manual/src/installation/uninstall.md @@ -1,16 +1,8 @@ # Uninstalling Nix -## Single User - -If you have a [single-user installation](./installing-binary.md#single-user-installation) of Nix, uninstall it by running: - -```console -$ rm -rf /nix -``` - ## Multi User -Removing a [multi-user installation](./installing-binary.md#multi-user-installation) of Nix is more involved, and depends on the operating system. +Removing a [multi-user installation](./installing-binary.md#multi-user-installation) depends on the operating system. ### Linux @@ -51,7 +43,15 @@ which you may remove. ### macOS -1. Edit `/etc/zshrc`, `/etc/bashrc`, and `/etc/bash.bashrc` to remove the lines sourcing `nix-daemon.sh`, which should look like this: +1. If system-wide shell initialisation files haven't been altered since installing Nix, use the backups made by the installer: + + ```console + sudo mv /etc/zshrc.backup-before-nix /etc/zshrc + sudo mv /etc/bashrc.backup-before-nix /etc/bashrc + sudo mv /etc/bash.bashrc.backup-before-nix /etc/bash.bashrc + ``` + + Otherwise, edit `/etc/zshrc`, `/etc/bashrc`, and `/etc/bash.bashrc` to remove the lines sourcing `nix-daemon.sh`, which should look like this: ```bash # Nix @@ -61,18 +61,6 @@ which you may remove. # End Nix ``` - If these files haven't been altered since installing Nix you can simply put - the backups back in place: - - ```console - sudo mv /etc/zshrc.backup-before-nix /etc/zshrc - sudo mv /etc/bashrc.backup-before-nix /etc/bashrc - sudo mv /etc/bash.bashrc.backup-before-nix /etc/bash.bashrc - ``` - - This will stop shells from sourcing the file and bringing everything you - installed using Nix in scope. - 2. Stop and remove the Nix daemon services: ```console @@ -82,8 +70,7 @@ which you may remove. sudo rm /Library/LaunchDaemons/org.nixos.darwin-store.plist ``` - This stops the Nix daemon and prevents it from being started next time you - boot the system. + This stops the Nix daemon and prevents it from being started next time you boot the system. 3. Remove the `nixbld` group and the `_nixbuildN` users: @@ -94,25 +81,42 @@ which you may remove. This will remove all the build users that no longer serve a purpose. -4. 
Edit fstab using `sudo vifs` to remove the line mounting the Nix Store
-   volume on `/nix`, which looks like
-   `UUID= /nix apfs rw,noauto,nobrowse,suid,owners` or
-   `LABEL=Nix\040Store /nix apfs rw,nobrowse`. This will prevent automatic
-   mounting of the Nix Store volume.
+4. Edit fstab using `sudo vifs` to remove the line mounting the Nix Store volume on `/nix`, which looks like
+
+   ```
+   UUID= /nix apfs rw,noauto,nobrowse,suid,owners
+   ```
+   or
+
+   ```
+   LABEL=Nix\040Store /nix apfs rw,nobrowse
+   ```
+
+   by setting the cursor on the respective line using the arrow keys, and pressing `dd`, and then `:wq` to save the file.
-5. Edit `/etc/synthetic.conf` to remove the `nix` line. If this is the only
-   line in the file you can remove it entirely, `sudo rm /etc/synthetic.conf`.
-   This will prevent the creation of the empty `/nix` directory to provide a
-   mountpoint for the Nix Store volume.
+   This will prevent automatic mounting of the Nix Store volume.
-6. Remove the files Nix added to your system:
+5. Edit `/etc/synthetic.conf` to remove the `nix` line.
+   If this is the only line in the file you can remove it entirely:
+
+   ```bash
+   if [ -f /etc/synthetic.conf ]; then
+     if [ "$(cat /etc/synthetic.conf)" = "nix" ]; then
+       sudo rm /etc/synthetic.conf
+     else
+       sudo vi /etc/synthetic.conf
+     fi
+   fi
+   ```
+
+   This will prevent the creation of the empty `/nix` directory.
+
+6. Remove the files Nix added to your system, except for the store:

    ```console
    sudo rm -rf /etc/nix /var/root/.nix-profile /var/root/.nix-defexpr /var/root/.nix-channels ~/.nix-profile ~/.nix-defexpr ~/.nix-channels
    ```

-   This gets rid of any data Nix may have created except for the store which is
-   removed next.

 7. Remove the Nix Store volume:
@@ -120,29 +124,32 @@
    sudo diskutil apfs deleteVolume /nix
    ```

-   This will remove the Nix Store volume and everything that was added to the
-   store.
+   This will remove the Nix Store volume and everything that was added to the store.

-   If the output indicates that the command couldn't remove the volume, you should
-   make sure you don't have an _unmounted_ Nix Store volume. Look for a
-   "Nix Store" volume in the output of the following command:
+   If the output indicates that the command couldn't remove the volume, you should make sure you don't have an _unmounted_ Nix Store volume.
+   Look for a "Nix Store" volume in the output of the following command:

    ```console
    diskutil list
    ```

-   If you _do_ see a "Nix Store" volume, delete it by re-running the diskutil
-   deleteVolume command, but replace `/nix` with the store volume's `diskXsY`
-   identifier.
+   If you _do_ find a "Nix Store" volume, delete it by running `diskutil deleteVolume` with the store volume's `diskXsY` identifier.

 > **Note**
 >
-> After you complete the steps here, you will still have an empty `/nix`
-> directory. This is an expected sign of a successful uninstall. The empty
-> `/nix` directory will disappear the next time you reboot.
+> After you complete the steps here, you will still have an empty `/nix` directory.
+> This is an expected sign of a successful uninstall.
+> The empty `/nix` directory will disappear the next time you reboot.
 >
-> You do not have to reboot to finish uninstalling Nix. The uninstall is
-> complete. macOS (Catalina+) directly controls root directories and its
-> read-only root will prevent you from manually deleting the empty `/nix`
-> mountpoint.
+> You do not have to reboot to finish uninstalling Nix.
+> The uninstall is complete.
+> macOS (Catalina+) directly controls root directories, and its read-only root will prevent you from manually deleting the empty `/nix` mountpoint. +## Single User + +To remove a [single-user installation](./installing-binary.md#single-user-installation) of Nix, run: + +```console +$ rm -rf /nix ~/.nix-channels ~/.nix-defexpr ~/.nix-profile +``` +You might also want to manually remove references to Nix from your `~/.profile`. diff --git a/doc/manual/src/language/constructs.md b/doc/manual/src/language/constructs.md index a82ec5960a8d..4d75ea82c853 100644 --- a/doc/manual/src/language/constructs.md +++ b/doc/manual/src/language/constructs.md @@ -402,7 +402,36 @@ establishes the same scope as let a = 1; in let a = 2; in let a = 3; in let a = 4; in ... ``` +Variables coming from outer `with` expressions *are* shadowed: + +```nix +with { a = "outer"; }; +with { a = "inner"; }; +a +``` + +Does evaluate to `"inner"`. + ## Comments Comments can be single-line, started with a `#` character, or inline/multi-line, enclosed within `/* ... */`. + +`#` comments last until the end of the line. + +`/*` comments run until the next occurrence of `*/`; this cannot be escaped. + +## Scoping rules + +Nix is [statically scoped](https://en.wikipedia.org/wiki/Scope_(computer_science)#Lexical_scope), but with multiple scopes and shadowing rules. + +* primary scope --- explicitly-bound variables + * [`let`](#let-expressions) + * [`inherit`](#inheriting-attributes) + * function arguments + +* secondary scope --- implicitly-bound variables + * [`with`](#with-expressions) + +Primary scope takes precedence over secondary scope. +See [`with`](#with-expressions) for a detailed example. diff --git a/doc/manual/src/language/index.md b/doc/manual/src/language/index.md index 650412f1b5d5..3694480d718d 100644 --- a/doc/manual/src/language/index.md +++ b/doc/manual/src/language/index.md @@ -53,7 +53,7 @@ This is an incomplete overview of language features, by example. - *Basic values* + *Basic values ([primitives](@docroot@/language/values.md#primitives))* @@ -71,7 +71,7 @@ This is an incomplete overview of language features, by example. - A string + A [string](@docroot@/language/values.md#type-string) @@ -94,6 +94,18 @@ This is an incomplete overview of language features, by example. + + + + `# Explanation` + + + + + A [comment](@docroot@/language/constructs.md#comments). + + + @@ -106,7 +118,7 @@ This is an incomplete overview of language features, by example. - String interpolation (expands to `"hello world"`, `"1 2 3"`, `"/nix/store/-bash-/bin/sh"`) + [String interpolation](@docroot@/language/string-interpolation.md) (expands to `"hello world"`, `"1 2 3"`, `"/nix/store/-bash-/bin/sh"`) @@ -118,7 +130,7 @@ This is an incomplete overview of language features, by example. - Booleans + [Booleans](@docroot@/language/values.md#type-boolean) @@ -130,7 +142,7 @@ This is an incomplete overview of language features, by example. - Null value + [Null](@docroot@/language/values.md#type-null) value @@ -142,7 +154,7 @@ This is an incomplete overview of language features, by example. - An integer + An [integer](@docroot@/language/values.md#type-number) @@ -154,7 +166,7 @@ This is an incomplete overview of language features, by example. - A floating point number + A [floating point number](@docroot@/language/values.md#type-number) @@ -166,7 +178,7 @@ This is an incomplete overview of language features, by example. 
- An absolute path + An absolute [path](@docroot@/language/values.md#type-path) @@ -178,7 +190,7 @@ This is an incomplete overview of language features, by example. - A path relative to the file containing this Nix expression + A [path](@docroot@/language/values.md#type-path) relative to the file containing this Nix expression @@ -190,7 +202,7 @@ This is an incomplete overview of language features, by example. - A home path. Evaluates to the `"/.config"`. + A home [path](@docroot@/language/values.md#type-path). Evaluates to the `"/.config"`. @@ -202,7 +214,7 @@ This is an incomplete overview of language features, by example. - Search path for Nix files. Value determined by [`$NIX_PATH` environment variable](../command-ref/env-common.md#env-NIX_PATH). + A [lookup path](@docroot@/language/constructs/lookup-path.md) for Nix files. Value determined by [`$NIX_PATH` environment variable](../command-ref/env-common.md#env-NIX_PATH). @@ -226,7 +238,7 @@ This is an incomplete overview of language features, by example. - A set with attributes named `x` and `y` + An [attribute set](@docroot@/language/values.md#attribute-set) with attributes named `x` and `y` @@ -250,7 +262,7 @@ This is an incomplete overview of language features, by example. - A recursive set, equivalent to `{ x = "foo"; y = "foobar"; }` + A [recursive set](@docroot@/language/constructs.md#recursive-sets), equivalent to `{ x = "foo"; y = "foobar"; }`. @@ -266,7 +278,7 @@ This is an incomplete overview of language features, by example. - Lists with three elements. + [Lists](@docroot@/language/values.md#list) with three elements. @@ -350,7 +362,7 @@ This is an incomplete overview of language features, by example. - Attribute selection (evaluates to `1`) + [Attribute selection](@docroot@/language/values.md#attribute-set) (evaluates to `1`) @@ -362,7 +374,7 @@ This is an incomplete overview of language features, by example. - Attribute selection with default (evaluates to `3`) + [Attribute selection](@docroot@/language/values.md#attribute-set) with default (evaluates to `3`) @@ -398,7 +410,7 @@ This is an incomplete overview of language features, by example. - Conditional expression + [Conditional expression](@docroot@/language/constructs.md#conditionals). @@ -410,7 +422,7 @@ This is an incomplete overview of language features, by example. - Assertion check (evaluates to `"yes!"`). + [Assertion](@docroot@/language/constructs.md#assertions) check (evaluates to `"yes!"`). @@ -422,7 +434,7 @@ This is an incomplete overview of language features, by example. - Variable definition + Variable definition. See [`let`-expressions](@docroot@/language/constructs.md#let-expressions). @@ -434,7 +446,9 @@ This is an incomplete overview of language features, by example. - Add all attributes from the given set to the scope (evaluates to `1`) + Add all attributes from the given set to the scope (evaluates to `1`). + + See [`with`-expressions](@docroot@/language/constructs.md#with-expressions) for details and shadowing caveats. @@ -447,7 +461,8 @@ This is an incomplete overview of language features, by example. Adds the variables to the current scope (attribute set or `let` binding). - Desugars to `pkgs = pkgs; src = src;` + Desugars to `pkgs = pkgs; src = src;`. + See [Inheriting attributes](@docroot@/language/constructs.md#inheriting-attributes). @@ -460,14 +475,15 @@ This is an incomplete overview of language features, by example. Adds the attributes, from the attribute set in parentheses, to the current scope (attribute set or `let` binding). 
- Desugars to `lib = pkgs.lib; stdenv = pkgs.stdenv;` + Desugars to `lib = pkgs.lib; stdenv = pkgs.stdenv;`. + See [Inheriting attributes](@docroot@/language/constructs.md#inheriting-attributes). - *Functions (lambdas)* + *[Functions](@docroot@/language/constructs.md#functions) (lambdas)* @@ -484,7 +500,7 @@ This is an incomplete overview of language features, by example. - A function that expects an integer and returns it increased by 1 + A [function](@docroot@/language/constructs.md#functions) that expects an integer and returns it increased by 1. @@ -496,7 +512,7 @@ This is an incomplete overview of language features, by example. - Curried function, equivalent to `x: (y: x + y)`. Can be used like a function that takes two arguments and returns their sum. + Curried [function](@docroot@/language/constructs.md#functions), equivalent to `x: (y: x + y)`. Can be used like a function that takes two arguments and returns their sum. @@ -508,7 +524,7 @@ This is an incomplete overview of language features, by example. - A function call (evaluates to 101) + A [function](@docroot@/language/constructs.md#functions) call (evaluates to 101) @@ -520,7 +536,7 @@ This is an incomplete overview of language features, by example. - A function bound to a variable and subsequently called by name (evaluates to 103) + A [function](@docroot@/language/constructs.md#functions) bound to a variable and subsequently called by name (evaluates to 103) @@ -532,7 +548,7 @@ This is an incomplete overview of language features, by example. - A function that expects a set with required attributes `x` and `y` and concatenates them + A [function](@docroot@/language/constructs.md#functions) that expects a set with required attributes `x` and `y` and concatenates them @@ -544,7 +560,7 @@ This is an incomplete overview of language features, by example. - A function that expects a set with required attribute `x` and optional `y`, using `"bar"` as default value for `y` + A [function](@docroot@/language/constructs.md#functions) that expects a set with required attribute `x` and optional `y`, using `"bar"` as default value for `y` @@ -556,7 +572,7 @@ This is an incomplete overview of language features, by example. - A function that expects a set with required attributes `x` and `y` and ignores any other attributes + A [function](@docroot@/language/constructs.md#functions) that expects a set with required attributes `x` and `y` and ignores any other attributes @@ -570,7 +586,7 @@ This is an incomplete overview of language features, by example. - A function that expects a set with required attributes `x` and `y`, and binds the whole set to `args` + A [function](@docroot@/language/constructs.md#functions) that expects a set with required attributes `x` and `y`, and binds the whole set to `args` @@ -594,7 +610,8 @@ This is an incomplete overview of language features, by example. - Load and return Nix expression in given file + Load and return Nix expression in given file. + See [import](@docroot@/language/builtins.md#builtins-import). @@ -606,7 +623,8 @@ This is an incomplete overview of language features, by example. - Apply a function to every element of a list (evaluates to `[ 2 4 6 ]`) + Apply a function to every element of a list (evaluates to `[ 2 4 6 ]`). + See [`map`](@docroot@/language/builtins.md#builtins-map). 
diff --git a/doc/manual/src/language/string-context.md b/doc/manual/src/language/string-context.md new file mode 100644 index 000000000000..88ae0d8b0a15 --- /dev/null +++ b/doc/manual/src/language/string-context.md @@ -0,0 +1,134 @@ +# String context + +> **Note** +> +> This is an advanced topic. +> The Nix language is designed to be used without the programmer consciously dealing with string contexts or even knowing what they are. + +A string in the Nix language is not just a sequence of characters like strings in other languages. +It is actually a pair of a sequence of characters and a *string context*. +The string context is an (unordered) set of *string context elements*. + +The purpose of string contexts is to collect non-string values attached to strings via +[string concatenation](./operators.md#string-concatenation), +[string interpolation](./string-interpolation.md), +and similar operations. +The idea is that a user can combine together values to create a build instructions for derivations without manually keeping track of where they come from. +Then the Nix language implicitly does that bookkeeping to efficiently obtain the closure of derivation inputs. + +> **Note** +> +> String contexts are *not* explicitly manipulated in idiomatic Nix language code. + +String context elements come in different forms: + +- [deriving path]{#string-context-element-derived-path} + + A string context element of this type is a [deriving path](@docroot@/glossary.md#gloss-deriving-path). + They can be either of type [constant](#string-context-constant) or [output](#string-context-output), which correspond to the types of deriving paths. + + - [Constant string context elements]{#string-context-constant} + + > **Example** + > + > [`builtins.storePath`] creates a string with a single constant string context element: + > + > ```nix + > builtins.getContext (builtins.storePath "/nix/store/wkhdf9jinag5750mqlax6z2zbwhqb76n-hello-2.10") + > ``` + > evaluates to + > ```nix + > { + > "/nix/store/wkhdf9jinag5750mqlax6z2zbwhqb76n-hello-2.10" = { + > path = true; + > }; + > } + > ``` + + [deriving path]: @docroot@/glossary.md#gloss-deriving-path + [store path]: @docroot@/glossary.md#gloss-store-path + [`builtins.storePath`]: ./builtins.md#builtins-storePath + + - [Output string context elements]{#string-context-output} + + > **Example** + > + > The behavior of string contexts are best demonstrated with a built-in function that is still experimental: [`builtins.outputOf`]. + > This example will *not* work with stable Nix! + > + > ```nix + > builtins.getContext + > (builtins.outputOf + > (builtins.storePath "/nix/store/fvchh9cvcr7kdla6n860hshchsba305w-hello-2.12.drv") + > "out") + > ``` + > evaluates to + > ```nix + > { + > "/nix/store/fvchh9cvcr7kdla6n860hshchsba305w-hello-2.12.drv" = { + > outputs = [ "out" ]; + > }; + > } + > ``` + + [`builtins.outputOf`]: ./builtins.md#builtins-outputOf + +- [*derivation deep*]{#string-context-element-derivation-deep} + + *derivation deep* is an advanced feature intended to be used with the + [`exportReferencesGraph` derivation attribute](./advanced-attributes.html#adv-attr-exportReferencesGraph). + A *derivation deep* string context element is a derivation path, and refers to both its outputs and the entire build closure of that derivation: + all its outputs, all the other derivations the given derivation depends on, and all the outputs of those. + + > **Example** + > + > The best way to illustrate *derivation deep* string contexts is with [`builtins.addDrvOutputDependencies`]. 
+ > Take a regular constant string context element pointing to a derivation, and transform it into a "Derivation deep" string context element. + > + > ```nix + > builtins.getContext + > (builtins.addDrvOutputDependencies + > (builtins.storePath "/nix/store/fvchh9cvcr7kdla6n860hshchsba305w-hello-2.12.drv")) + > ``` + > evaluates to + > ```nix + > { + > "/nix/store/fvchh9cvcr7kdla6n860hshchsba305w-hello-2.12.drv" = { + > allOutputs = true; + > }; + > } + > ``` + + [`builtins.addDrvOutputDependencies`]: ./builtins.md#builtins-addDrvOutputDependencies + [`builtins.unsafeDiscardOutputDependency`]: ./builtins.md#builtins-unsafeDiscardOutputDependency + +## Inspecting string contexts + +Most basically, [`builtins.hasContext`] will tell whether a string has a non-empty context. + +When more granular information is needed, [`builtins.getContext`] can be used. +It creates an [attribute set] representing the string context, which can be inspected as usual. + +[`builtins.hasContext`]: ./builtins.md#builtins-hasContext +[`builtins.getContext`]: ./builtins.md#builtins-getContext +[attribute set]: ./values.md#attribute-set + +## Clearing string contexts + +[`buitins.unsafeDiscardStringContext`](./builtins.md#builtins-unsafeDiscardStringContext) will make a copy of a string, but with an empty string context. +The returned string can be used in more ways, e.g. by operators that require the string context to be empty. +The requirement to explicitly discard the string context in such use cases helps ensure that string context elements are not lost by mistake. +The "unsafe" marker is only there to remind that Nix normally guarantees that dependencies are tracked, whereas the returned string has lost them. + +## Constructing string contexts + +[`builtins.appendContext`] will create a copy of a string, but with additional string context elements. +The context is specified explicitly by an [attribute set] in the format that [`builtins.hasContext`] produces. +A string with arbitrary contexts can be made like this: + +1. Create a string with the desired string context elements. + (The contents of the string do not matter.) +2. Dump its context with [`builtins.getContext`]. +3. Combine it with a base string and repeated [`builtins.appendContext`] calls. + +[`builtins.appendContext`]: ./builtins.md#builtins-appendContext diff --git a/doc/manual/src/language/values.md b/doc/manual/src/language/values.md index 568542c0bcc0..2dd52b379f69 100644 --- a/doc/manual/src/language/values.md +++ b/doc/manual/src/language/values.md @@ -92,39 +92,50 @@ - Path - *Paths*, e.g., `/bin/sh` or `./builder.sh`. A path must contain at - least one slash to be recognised as such. For instance, `builder.sh` - is not a path: it's parsed as an expression that selects the - attribute `sh` from the variable `builder`. If the file name is - relative, i.e., if it does not begin with a slash, it is made - absolute at parse time relative to the directory of the Nix - expression that contained it. For instance, if a Nix expression in - `/foo/bar/bla.nix` refers to `../xyzzy/fnord.nix`, the absolute path - is `/foo/xyzzy/fnord.nix`. - - If the first component of a path is a `~`, it is interpreted as if - the rest of the path were relative to the user's home directory. - e.g. `~/foo` would be equivalent to `/home/edolstra/foo` for a user - whose home directory is `/home/edolstra`. 
- - For instance, evaluating `"${./foo.txt}"` will cause `foo.txt` in the current directory to be copied into the Nix store and result in the string `"/nix/store/-foo.txt"`. - - Note that the Nix language assumes that all input files will remain _unchanged_ while evaluating a Nix expression. + *Paths* are distinct from strings and can be expressed by path literals such as `./builder.sh`. + + Paths are suitable for referring to local files, and are often preferable over strings. + - Path values do not contain trailing slashes, `.` and `..`, as they are resolved when evaluating a path literal. + - Path literals are automatically resolved relative to their [base directory](@docroot@/glossary.md#gloss-base-directory). + - The files referred to by path values are automatically copied into the Nix store when used in a string interpolation or concatenation. + - Tooling can recognize path literals and provide additional features, such as autocompletion, refactoring automation and jump-to-file. + + A path literal must contain at least one slash to be recognised as such. + For instance, `builder.sh` is not a path: + it's parsed as an expression that selects the attribute `sh` from the variable `builder`. + + Path literals may also refer to absolute paths by starting with a slash. + + > **Note** + > + > Absolute paths make expressions less portable. + > In the case where a function translates a path literal into an absolute path string for a configuration file, it is recommended to write a string literal instead. + > This avoids some confusion about whether files at that location will be used during evaluation. + > It also avoids unintentional situations where some function might try to copy everything at the location into the store. + + If the first component of a path is a `~`, it is interpreted such that the rest of the path were relative to the user's home directory. + For example, `~/foo` would be equivalent to `/home/edolstra/foo` for a user whose home directory is `/home/edolstra`. + Path literals that start with `~` are not allowed in [pure](@docroot@/command-ref/conf-file.md#conf-pure-eval) evaluation. + + Paths can be used in [string interpolation] and string concatenation. + For instance, evaluating `"${./foo.txt}"` will cause `foo.txt` from the same directory to be copied into the Nix store and result in the string `"/nix/store/-foo.txt"`. + + Note that the Nix language assumes that all input files will remain _unchanged_ while evaluating a Nix expression. For example, assume you used a file path in an interpolated string during a `nix repl` session. - Later in the same session, after having changed the file contents, evaluating the interpolated string with the file path again might not return a new [store path], since Nix might not re-read the file contents. + Later in the same session, after having changed the file contents, evaluating the interpolated string with the file path again might not return a new [store path], since Nix might not re-read the file contents. Use `:r` to reset the repl as needed. [store path]: @docroot@/glossary.md#gloss-store-path - Paths can include [string interpolation] and can themselves be [interpolated in other expressions]. + Path literals can also include [string interpolation], besides being [interpolated into other expressions]. 
- [interpolated in other expressions]: ./string-interpolation.md#interpolated-expressions + [interpolated into other expressions]: ./string-interpolation.md#interpolated-expressions At least one slash (`/`) must appear *before* any interpolated expression for the result to be recognized as a path. - `a.${foo}/b.${bar}` is a syntactically valid division operation. + `a.${foo}/b.${bar}` is a syntactically valid number division operation. `./a.${foo}/b.${bar}` is a path. - [Lookup paths](./constructs/lookup-path.md) such as `` resolve to path values. + [Lookup path](./constructs/lookup-path.md) literals such as `` also resolve to path values. - Boolean diff --git a/doc/manual/src/release-notes/rl-2.22.md b/doc/manual/src/release-notes/rl-2.22.md new file mode 100644 index 000000000000..c78d3d692af5 --- /dev/null +++ b/doc/manual/src/release-notes/rl-2.22.md @@ -0,0 +1,21 @@ +# Release 2.22.0 (2024-04-23) + +### Significant changes + +- Remove experimental repl-flake [#10103](https://github.com/NixOS/nix/issues/10103) [#10299](https://github.com/NixOS/nix/pull/10299) + + The `repl-flake` experimental feature has been removed. The `nix repl` command now works like the rest of the new CLI in that `nix repl {path}` now tries to load a flake at `{path}` (or fails if the `flakes` experimental feature isn't enabled). + +### Other changes + +- `nix eval` prints derivations as `.drv` paths [#10200](https://github.com/NixOS/nix/pull/10200) + + `nix eval` will now print derivations as their `.drv` paths, rather than as + attribute sets. This makes commands like `nix eval nixpkgs#bash` terminate + instead of infinitely looping into recursive self-referential attributes: + + ```ShellSession + $ nix eval nixpkgs#bash + «derivation /nix/store/m32cbgbd598f4w299g0hwyv7gbw6rqcg-bash-5.2p26.drv» + ``` + diff --git a/flake.lock b/flake.lock index bb2e400c0a99..409463ad8c38 100644 --- a/flake.lock +++ b/flake.lock @@ -16,6 +16,41 @@ "type": "github" } }, + "flake-parts": { + "inputs": { + "nixpkgs-lib": [ + "nixpkgs" + ] + }, + "locked": { + "lastModified": 1712014858, + "narHash": "sha256-sB4SWl2lX95bExY2gMFG5HIzvva5AVMJd4Igm+GpZNw=", + "owner": "hercules-ci", + "repo": "flake-parts", + "rev": "9126214d0a59633752a136528f5f3b9aa8565b7d", + "type": "github" + }, + "original": { + "owner": "hercules-ci", + "repo": "flake-parts", + "type": "github" + } + }, + "flake-utils": { + "locked": { + "lastModified": 1667395993, + "narHash": "sha256-nuEHfE/LcWyuSWnS8t12N1wc105Qtau+/OdUAjtQ0rA=", + "owner": "numtide", + "repo": "flake-utils", + "rev": "5aed5285a952e0b949eb3ba02c12fa4fcfef535f", + "type": "github" + }, + "original": { + "owner": "numtide", + "repo": "flake-utils", + "type": "github" + } + }, "libgit2": { "flake": false, "locked": { @@ -64,12 +99,40 @@ "type": "github" } }, + "pre-commit-hooks": { + "inputs": { + "flake-compat": [], + "flake-utils": "flake-utils", + "gitignore": [], + "nixpkgs": [ + "nixpkgs" + ], + "nixpkgs-stable": [ + "nixpkgs" + ] + }, + "locked": { + "lastModified": 1712897695, + "narHash": "sha256-nMirxrGteNAl9sWiOhoN5tIHyjBbVi5e2tgZUgZlK3Y=", + "owner": "cachix", + "repo": "pre-commit-hooks.nix", + "rev": "40e6053ecb65fcbf12863338a6dcefb3f55f1bf8", + "type": "github" + }, + "original": { + "owner": "cachix", + "repo": "pre-commit-hooks.nix", + "type": "github" + } + }, "root": { "inputs": { "flake-compat": "flake-compat", + "flake-parts": "flake-parts", "libgit2": "libgit2", "nixpkgs": "nixpkgs", - "nixpkgs-regression": "nixpkgs-regression" + "nixpkgs-regression": 
"nixpkgs-regression", + "pre-commit-hooks": "pre-commit-hooks" } } }, diff --git a/flake.nix b/flake.nix index 07d909fcda0b..987f2530521e 100644 --- a/flake.nix +++ b/flake.nix @@ -8,7 +8,19 @@ inputs.flake-compat = { url = "github:edolstra/flake-compat"; flake = false; }; inputs.libgit2 = { url = "github:libgit2/libgit2"; flake = false; }; - outputs = { self, nixpkgs, nixpkgs-regression, libgit2, ... }: + # dev tooling + inputs.flake-parts.url = "github:hercules-ci/flake-parts"; + inputs.pre-commit-hooks.url = "github:cachix/pre-commit-hooks.nix"; + # work around https://github.com/NixOS/nix/issues/7730 + inputs.flake-parts.inputs.nixpkgs-lib.follows = "nixpkgs"; + inputs.pre-commit-hooks.inputs.nixpkgs.follows = "nixpkgs"; + inputs.pre-commit-hooks.inputs.nixpkgs-stable.follows = "nixpkgs"; + # work around 7730 and https://github.com/NixOS/nix/issues/7807 + inputs.pre-commit-hooks.inputs.flake-compat.follows = ""; + inputs.pre-commit-hooks.inputs.gitignore.follows = ""; + + outputs = inputs@{ self, nixpkgs, nixpkgs-regression, libgit2, ... }: + let inherit (nixpkgs) lib; @@ -57,6 +69,17 @@ }) stdenvs); + + # We don't apply flake-parts to the whole flake so that non-development attributes + # load without fetching any development inputs. + devFlake = inputs.flake-parts.lib.mkFlake { inherit inputs; } { + imports = [ ./maintainers/flake-module.nix ]; + systems = lib.subtractLists crossSystems systems; + perSystem = { system, ... }: { + _module.args.pkgs = nixpkgsFor.${system}.native; + }; + }; + # Memoize nixpkgs for different platforms for efficiency. nixpkgsFor = forAllSystems (system: let @@ -156,6 +179,14 @@ ]; }); + libseccomp-nix = final.libseccomp.overrideAttrs (_: rec { + version = "2.5.5"; + src = final.fetchurl { + url = "https://github.com/seccomp/libseccomp/releases/download/v${version}/libseccomp-${version}.tar.gz"; + hash = "sha256-JIosik2bmFiqa69ScSw0r+/PnJ6Ut23OAsHJqiX7M3U="; + }; + }); + changelog-d-nix = final.buildPackages.callPackage ./misc/changelog-d.nix { }; nix = @@ -175,6 +206,7 @@ officialRelease = false; boehmgc = final.boehmgc-nix; libgit2 = final.libgit2-nix; + libseccomp = final.libseccomp-nix; busybox-sandbox-shell = final.busybox-sandbox-shell or final.default-busybox-sandbox-shell; } // { # this is a proper separate downstream package, but put @@ -186,6 +218,13 @@ inherit fileset stdenv; }; + # See https://github.com/NixOS/nixpkgs/pull/214409 + # Remove when fixed in this flake's nixpkgs + pre-commit = + if prev.stdenv.hostPlatform.system == "i686-linux" + then (prev.pre-commit.override (o: { dotnet-sdk = ""; })).overridePythonAttrs (o: { doCheck = false; }) + else prev.pre-commit; + }; in { @@ -361,7 +400,8 @@ # Since the support is only best-effort there, disable the perl # bindings perlBindings = self.hydraJobs.perlBindings.${system}; - }); + } // devFlake.checks.${system} or {} + ); packages = forAllSystems (system: rec { inherit (nixpkgsFor.${system}.native) nix changelog-d-nix; @@ -396,7 +436,11 @@ stdenvs))); devShells = let - makeShell = pkgs: stdenv: (pkgs.nix.override { inherit stdenv; forDevShell = true; }).overrideAttrs (attrs: { + makeShell = pkgs: stdenv: (pkgs.nix.override { inherit stdenv; forDevShell = true; }).overrideAttrs (attrs: + let + modular = devFlake.getSystem stdenv.buildPlatform.system; + in { + pname = "shell-for-" + attrs.pname; installFlags = "sysconfdir=$(out)/etc"; shellHook = '' PATH=$prefix/bin:$PATH @@ -407,7 +451,21 @@ XDG_DATA_DIRS+=:$out/share ''; + # We use this shell with the local checkout, not 
unpackPhase. + src = null; + + env = { + # For `make format`, to work without installing pre-commit + _NIX_PRE_COMMIT_HOOKS_CONFIG = + "${(pkgs.formats.yaml { }).generate "pre-commit-config.yaml" modular.pre-commit.settings.rawConfig}"; + }; + nativeBuildInputs = attrs.nativeBuildInputs or [] + ++ [ + modular.pre-commit.settings.package + (pkgs.writeScriptBin "pre-commit-hooks-install" + modular.pre-commit.settings.installationScript) + ] # TODO: Remove the darwin check once # https://github.com/NixOS/nixpkgs/pull/291814 is available ++ lib.optional (stdenv.cc.isClang && !stdenv.buildPlatform.isDarwin) pkgs.buildPackages.bear diff --git a/maintainers/README.md b/maintainers/README.md index fa321c7c09f8..bfa0cb5a1830 100644 --- a/maintainers/README.md +++ b/maintainers/README.md @@ -36,11 +36,13 @@ We aim to achieve this by improving the contributor experience and attracting mo - Robert Hensing (@roberth) - John Ericson (@Ericson2314) +The team is on GitHub as [@NixOS/nix-team](https://github.com/orgs/NixOS/teams/nix-team). + ## Meeting protocol -The team meets twice a week: +The team meets twice a week (times are denoted in the [Europe/Amsterdam](https://en.m.wikipedia.org/wiki/Time_in_the_Netherlands) time zone): -- Discussion meeting: [Fridays 13:00-14:00 CET](https://calendar.google.com/calendar/event?eid=MHNtOGVuNWtrZXNpZHR2bW1sM3QyN2ZjaGNfMjAyMjExMjVUMTIwMDAwWiBiOW81MmZvYnFqYWs4b3E4bGZraGczdDBxZ0Bn) +- Discussion meeting: [Wednesday 21:00-22:00 Europe/Amsterdam](https://www.google.com/calendar/event?eid=ZG5rZzNyajRjajducGV2NGY5aGkzYWIwdnJfMjAyNDA1MDhUMTkwMDAwWiBiOW81MmZvYnFqYWs4b3E4bGZraGczdDBxZ0Bn) 1. Triage issues and pull requests from the [No Status](#no-status) column (30 min) 2. Discuss issues and pull requests from the [To discuss](#to-discuss) column (30 min). @@ -49,15 +51,19 @@ The team meets twice a week: - mark it as draft if it is blocked on the contributor - escalate it back to the team by moving it to To discuss, and leaving a comment as to why the issue needs to be discussed again. -- Work meeting: [Mondays 13:00-15:00 CET](https://calendar.google.com/calendar/event?eid=NTM1MG1wNGJnOGpmOTZhYms3bTB1bnY5cWxfMjAyMjExMjFUMTIwMDAwWiBiOW81MmZvYnFqYWs4b3E4bGZraGczdDBxZ0Bn) +- Work meeting: [Mondays 13:00-15:00 Europe/Amsterdam](https://www.google.com/calendar/event?eid=Ym52NDdzYnRic2NzcDcybjZiNDhpNzhpa3NfMjAyNDA1MTNUMTIwMDAwWiBiOW81MmZvYnFqYWs4b3E4bGZraGczdDBxZ0Bn) 1. Code review on pull requests from [In review](#in-review). 2. Other chores and tasks. Meeting notes are collected on a [collaborative scratchpad](https://pad.lassul.us/Cv7FpYx-Ri-4VjUykQOLAw). -Notes on issues and pull requests are posted as comments and linked from the meeting notes, so they are easy to find from both places. +Notes on issues and pull requests are posted as comments and linked from the meeting notes, so they can be found from both places. [All meeting notes](https://discourse.nixos.org/search?expanded=true&q=Nix%20team%20meeting%20minutes%20%23%20%23dev%3Anix%20in%3Atitle%20order%3Alatest_topic) are published on Discourse under the [Nix category](https://discourse.nixos.org/c/dev/nix/50). +Team meetings are generally open to anyone interested. +We can make exceptions to discuss sensitive issues, such as security incidents or people matters. +Contact any team member to get a calendar invite for reminders and updates. + ## Project board protocol The team uses a [GitHub project board](https://github.com/orgs/NixOS/projects/19/views/1) for tracking its work.
diff --git a/maintainers/flake-module.nix b/maintainers/flake-module.nix new file mode 100644 index 000000000000..351a01fcbffb --- /dev/null +++ b/maintainers/flake-module.nix @@ -0,0 +1,436 @@ +{ lib, getSystem, inputs, ... }: + +{ + imports = [ + inputs.pre-commit-hooks.flakeModule + ]; + + perSystem = { config, pkgs, ... }: { + + # https://flake.parts/options/pre-commit-hooks-nix.html#options + pre-commit.settings = { + hooks = { + clang-format.enable = true; + # TODO: nixfmt, https://github.com/NixOS/nixfmt/issues/153 + }; + + excludes = [ + # We don't want to format test data + # ''tests/(?!nixos/).*\.nix'' + ''^tests/.*'' + + # Don't format vendored code + ''^src/toml11/.*'' + ''^doc/manual/redirects\.js$'' + ''^doc/manual/theme/highlight\.js$'' + + # We haven't applied formatting to these files yet + ''^doc/manual/redirects\.js$'' + ''^doc/manual/theme/highlight\.js$'' + ''^precompiled-headers\.h$'' + ''^src/build-remote/build-remote\.cc$'' + ''^src/libcmd/built-path\.cc$'' + ''^src/libcmd/built-path\.hh$'' + ''^src/libcmd/command\.cc$'' + ''^src/libcmd/command\.hh$'' + ''^src/libcmd/common-eval-args\.cc$'' + ''^src/libcmd/common-eval-args\.hh$'' + ''^src/libcmd/editor-for\.cc$'' + ''^src/libcmd/installable-attr-path\.cc$'' + ''^src/libcmd/installable-attr-path\.hh$'' + ''^src/libcmd/installable-derived-path\.cc$'' + ''^src/libcmd/installable-derived-path\.hh$'' + ''^src/libcmd/installable-flake\.cc$'' + ''^src/libcmd/installable-flake\.hh$'' + ''^src/libcmd/installable-value\.cc$'' + ''^src/libcmd/installable-value\.hh$'' + ''^src/libcmd/installables\.cc$'' + ''^src/libcmd/installables\.hh$'' + ''^src/libcmd/legacy\.hh$'' + ''^src/libcmd/markdown\.cc$'' + ''^src/libcmd/misc-store-flags\.cc$'' + ''^src/libcmd/repl-interacter\.cc$'' + ''^src/libcmd/repl-interacter\.hh$'' + ''^src/libcmd/repl\.cc$'' + ''^src/libcmd/repl\.hh$'' + ''^src/libexpr-c/nix_api_expr\.cc$'' + ''^src/libexpr-c/nix_api_external\.cc$'' + ''^src/libexpr/attr-path\.cc$'' + ''^src/libexpr/attr-path\.hh$'' + ''^src/libexpr/attr-set\.cc$'' + ''^src/libexpr/attr-set\.hh$'' + ''^src/libexpr/eval-cache\.cc$'' + ''^src/libexpr/eval-cache\.hh$'' + ''^src/libexpr/eval-error\.cc$'' + ''^src/libexpr/eval-inline\.hh$'' + ''^src/libexpr/eval-settings\.cc$'' + ''^src/libexpr/eval-settings\.hh$'' + ''^src/libexpr/eval\.cc$'' + ''^src/libexpr/eval\.hh$'' + ''^src/libexpr/flake/config\.cc$'' + ''^src/libexpr/flake/flake\.cc$'' + ''^src/libexpr/flake/flake\.hh$'' + ''^src/libexpr/flake/flakeref\.cc$'' + ''^src/libexpr/flake/flakeref\.hh$'' + ''^src/libexpr/flake/lockfile\.cc$'' + ''^src/libexpr/flake/lockfile\.hh$'' + ''^src/libexpr/flake/url-name\.cc$'' + ''^src/libexpr/function-trace\.cc$'' + ''^src/libexpr/gc-small-vector\.hh$'' + ''^src/libexpr/get-drvs\.cc$'' + ''^src/libexpr/get-drvs\.hh$'' + ''^src/libexpr/json-to-value\.cc$'' + ''^src/libexpr/nixexpr\.cc$'' + ''^src/libexpr/nixexpr\.hh$'' + ''^src/libexpr/parser-state\.hh$'' + ''^src/libexpr/pos-table\.hh$'' + ''^src/libexpr/primops\.cc$'' + ''^src/libexpr/primops\.hh$'' + ''^src/libexpr/primops/context\.cc$'' + ''^src/libexpr/primops/fetchClosure\.cc$'' + ''^src/libexpr/primops/fetchMercurial\.cc$'' + ''^src/libexpr/primops/fetchTree\.cc$'' + ''^src/libexpr/primops/fromTOML\.cc$'' + ''^src/libexpr/print-ambiguous\.cc$'' + ''^src/libexpr/print-ambiguous\.hh$'' + ''^src/libexpr/print-options\.hh$'' + ''^src/libexpr/print\.cc$'' + ''^src/libexpr/print\.hh$'' + ''^src/libexpr/search-path\.cc$'' + ''^src/libexpr/symbol-table\.hh$'' + ''^src/libexpr/value-to-json\.cc$'' + 
''^src/libexpr/value-to-json\.hh$'' + ''^src/libexpr/value-to-xml\.cc$'' + ''^src/libexpr/value-to-xml\.hh$'' + ''^src/libexpr/value\.hh$'' + ''^src/libexpr/value/context\.cc$'' + ''^src/libexpr/value/context\.hh$'' + ''^src/libfetchers/attrs\.cc$'' + ''^src/libfetchers/cache\.cc$'' + ''^src/libfetchers/cache\.hh$'' + ''^src/libfetchers/fetch-settings\.cc$'' + ''^src/libfetchers/fetch-settings\.hh$'' + ''^src/libfetchers/fetch-to-store\.cc$'' + ''^src/libfetchers/fetchers\.cc$'' + ''^src/libfetchers/fetchers\.hh$'' + ''^src/libfetchers/filtering-source-accessor\.cc$'' + ''^src/libfetchers/filtering-source-accessor\.hh$'' + ''^src/libfetchers/fs-source-accessor\.cc$'' + ''^src/libfetchers/fs-source-accessor\.hh$'' + ''^src/libfetchers/git-utils\.cc$'' + ''^src/libfetchers/git-utils\.hh$'' + ''^src/libfetchers/github\.cc$'' + ''^src/libfetchers/indirect\.cc$'' + ''^src/libfetchers/memory-source-accessor\.cc$'' + ''^src/libfetchers/path\.cc$'' + ''^src/libfetchers/registry\.cc$'' + ''^src/libfetchers/registry\.hh$'' + ''^src/libfetchers/tarball\.cc$'' + ''^src/libfetchers/tarball\.hh$'' + ''^src/libfetchers/unix/git\.cc$'' + ''^src/libfetchers/unix/mercurial\.cc$'' + ''^src/libmain/common-args\.cc$'' + ''^src/libmain/common-args\.hh$'' + ''^src/libmain/loggers\.cc$'' + ''^src/libmain/loggers\.hh$'' + ''^src/libmain/progress-bar\.cc$'' + ''^src/libmain/shared\.cc$'' + ''^src/libmain/shared\.hh$'' + ''^src/libmain/unix/stack\.cc$'' + ''^src/libstore/binary-cache-store\.cc$'' + ''^src/libstore/binary-cache-store\.hh$'' + ''^src/libstore/build-result\.hh$'' + ''^src/libstore/builtins\.hh$'' + ''^src/libstore/builtins/buildenv\.cc$'' + ''^src/libstore/builtins/buildenv\.hh$'' + ''^src/libstore/common-protocol-impl\.hh$'' + ''^src/libstore/common-protocol\.cc$'' + ''^src/libstore/common-protocol\.hh$'' + ''^src/libstore/content-address\.cc$'' + ''^src/libstore/content-address\.hh$'' + ''^src/libstore/daemon\.cc$'' + ''^src/libstore/daemon\.hh$'' + ''^src/libstore/derivations\.cc$'' + ''^src/libstore/derivations\.hh$'' + ''^src/libstore/derived-path-map\.cc$'' + ''^src/libstore/derived-path-map\.hh$'' + ''^src/libstore/derived-path\.cc$'' + ''^src/libstore/derived-path\.hh$'' + ''^src/libstore/downstream-placeholder\.cc$'' + ''^src/libstore/downstream-placeholder\.hh$'' + ''^src/libstore/dummy-store\.cc$'' + ''^src/libstore/export-import\.cc$'' + ''^src/libstore/filetransfer\.cc$'' + ''^src/libstore/filetransfer\.hh$'' + ''^src/libstore/gc-store\.hh$'' + ''^src/libstore/globals\.cc$'' + ''^src/libstore/globals\.hh$'' + ''^src/libstore/http-binary-cache-store\.cc$'' + ''^src/libstore/legacy-ssh-store\.cc$'' + ''^src/libstore/legacy-ssh-store\.hh$'' + ''^src/libstore/length-prefixed-protocol-helper\.hh$'' + ''^src/libstore/linux/personality\.cc$'' + ''^src/libstore/linux/personality\.hh$'' + ''^src/libstore/local-binary-cache-store\.cc$'' + ''^src/libstore/local-fs-store\.cc$'' + ''^src/libstore/local-fs-store\.hh$'' + ''^src/libstore/log-store\.cc$'' + ''^src/libstore/log-store\.hh$'' + ''^src/libstore/machines\.cc$'' + ''^src/libstore/machines\.hh$'' + ''^src/libstore/make-content-addressed\.cc$'' + ''^src/libstore/make-content-addressed\.hh$'' + ''^src/libstore/misc\.cc$'' + ''^src/libstore/names\.cc$'' + ''^src/libstore/names\.hh$'' + ''^src/libstore/nar-accessor\.cc$'' + ''^src/libstore/nar-accessor\.hh$'' + ''^src/libstore/nar-info-disk-cache\.cc$'' + ''^src/libstore/nar-info-disk-cache\.hh$'' + ''^src/libstore/nar-info\.cc$'' + ''^src/libstore/nar-info\.hh$'' + 
''^src/libstore/outputs-spec\.cc$'' + ''^src/libstore/outputs-spec\.hh$'' + ''^src/libstore/parsed-derivations\.cc$'' + ''^src/libstore/path-info\.cc$'' + ''^src/libstore/path-info\.hh$'' + ''^src/libstore/path-references\.cc$'' + ''^src/libstore/path-regex\.hh$'' + ''^src/libstore/path-with-outputs\.cc$'' + ''^src/libstore/path\.cc$'' + ''^src/libstore/path\.hh$'' + ''^src/libstore/pathlocks\.cc$'' + ''^src/libstore/pathlocks\.hh$'' + ''^src/libstore/profiles\.cc$'' + ''^src/libstore/profiles\.hh$'' + ''^src/libstore/realisation\.cc$'' + ''^src/libstore/realisation\.hh$'' + ''^src/libstore/remote-fs-accessor\.cc$'' + ''^src/libstore/remote-fs-accessor\.hh$'' + ''^src/libstore/remote-store-connection\.hh$'' + ''^src/libstore/remote-store\.cc$'' + ''^src/libstore/remote-store\.hh$'' + ''^src/libstore/s3-binary-cache-store\.cc$'' + ''^src/libstore/s3\.hh$'' + ''^src/libstore/serve-protocol-impl\.cc$'' + ''^src/libstore/serve-protocol-impl\.hh$'' + ''^src/libstore/serve-protocol\.cc$'' + ''^src/libstore/serve-protocol\.hh$'' + ''^src/libstore/sqlite\.cc$'' + ''^src/libstore/sqlite\.hh$'' + ''^src/libstore/ssh-store-config\.hh$'' + ''^src/libstore/ssh-store\.cc$'' + ''^src/libstore/ssh\.cc$'' + ''^src/libstore/ssh\.hh$'' + ''^src/libstore/store-api\.cc$'' + ''^src/libstore/store-api\.hh$'' + ''^src/libstore/store-dir-config\.hh$'' + ''^src/libstore/unix/build/derivation-goal\.cc$'' + ''^src/libstore/unix/build/derivation-goal\.hh$'' + ''^src/libstore/unix/build/drv-output-substitution-goal\.cc$'' + ''^src/libstore/unix/build/drv-output-substitution-goal\.hh$'' + ''^src/libstore/unix/build/entry-points\.cc$'' + ''^src/libstore/unix/build/goal\.cc$'' + ''^src/libstore/unix/build/goal\.hh$'' + ''^src/libstore/unix/build/hook-instance\.cc$'' + ''^src/libstore/unix/build/local-derivation-goal\.cc$'' + ''^src/libstore/unix/build/local-derivation-goal\.hh$'' + ''^src/libstore/unix/build/substitution-goal\.cc$'' + ''^src/libstore/unix/build/substitution-goal\.hh$'' + ''^src/libstore/unix/build/worker\.cc$'' + ''^src/libstore/unix/build/worker\.hh$'' + ''^src/libstore/unix/builtins/fetchurl\.cc$'' + ''^src/libstore/unix/builtins/unpack-channel\.cc$'' + ''^src/libstore/gc\.cc$'' + ''^src/libstore/unix/local-overlay-store\.cc$'' + ''^src/libstore/unix/local-overlay-store\.hh$'' + ''^src/libstore/local-store\.cc$'' + ''^src/libstore/local-store\.hh$'' + ''^src/libstore/unix/lock\.cc$'' + ''^src/libstore/unix/lock\.hh$'' + ''^src/libstore/optimise-store\.cc$'' + ''^src/libstore/unix/pathlocks\.cc$'' + ''^src/libstore/posix-fs-canonicalise\.cc$'' + ''^src/libstore/posix-fs-canonicalise\.hh$'' + ''^src/libstore/uds-remote-store\.cc$'' + ''^src/libstore/uds-remote-store\.hh$'' + ''^src/libstore/windows/build\.cc$'' + ''^src/libstore/worker-protocol-impl\.hh$'' + ''^src/libstore/worker-protocol\.cc$'' + ''^src/libstore/worker-protocol\.hh$'' + ''^src/libutil-c/nix_api_util_internal\.h$'' + ''^src/libutil/archive\.cc$'' + ''^src/libutil/archive\.hh$'' + ''^src/libutil/args\.cc$'' + ''^src/libutil/args\.hh$'' + ''^src/libutil/args/root\.hh$'' + ''^src/libutil/callback\.hh$'' + ''^src/libutil/canon-path\.cc$'' + ''^src/libutil/canon-path\.hh$'' + ''^src/libutil/chunked-vector\.hh$'' + ''^src/libutil/closure\.hh$'' + ''^src/libutil/comparator\.hh$'' + ''^src/libutil/compute-levels\.cc$'' + ''^src/libutil/config-impl\.hh$'' + ''^src/libutil/config\.cc$'' + ''^src/libutil/config\.hh$'' + ''^src/libutil/current-process\.cc$'' + ''^src/libutil/current-process\.hh$'' + ''^src/libutil/english\.cc$'' + 
''^src/libutil/english\.hh$'' + ''^src/libutil/environment-variables\.cc$'' + ''^src/libutil/error\.cc$'' + ''^src/libutil/error\.hh$'' + ''^src/libutil/exit\.hh$'' + ''^src/libutil/experimental-features\.cc$'' + ''^src/libutil/experimental-features\.hh$'' + ''^src/libutil/file-content-address\.cc$'' + ''^src/libutil/file-content-address\.hh$'' + ''^src/libutil/file-descriptor\.cc$'' + ''^src/libutil/file-descriptor\.hh$'' + ''^src/libutil/file-path-impl\.hh$'' + ''^src/libutil/file-path\.hh$'' + ''^src/libutil/file-system\.cc$'' + ''^src/libutil/file-system\.hh$'' + ''^src/libutil/finally\.hh$'' + ''^src/libutil/fmt\.hh$'' + ''^src/libutil/fs-sink\.cc$'' + ''^src/libutil/fs-sink\.hh$'' + ''^src/libutil/git\.cc$'' + ''^src/libutil/git\.hh$'' + ''^src/libutil/hash\.cc$'' + ''^src/libutil/hash\.hh$'' + ''^src/libutil/hilite\.cc$'' + ''^src/libutil/hilite\.hh$'' + ''^src/libutil/source-accessor\.hh$'' + ''^src/libutil/json-impls\.hh$'' + ''^src/libutil/json-utils\.cc$'' + ''^src/libutil/json-utils\.hh$'' + ''^src/libutil/linux/cgroup\.cc$'' + ''^src/libutil/linux/namespaces\.cc$'' + ''^src/libutil/logging\.cc$'' + ''^src/libutil/logging\.hh$'' + ''^src/libutil/lru-cache\.hh$'' + ''^src/libutil/memory-source-accessor\.cc$'' + ''^src/libutil/memory-source-accessor\.hh$'' + ''^src/libutil/pool\.hh$'' + ''^src/libutil/position\.cc$'' + ''^src/libutil/position\.hh$'' + ''^src/libutil/posix-source-accessor\.cc$'' + ''^src/libutil/posix-source-accessor\.hh$'' + ''^src/libutil/processes\.hh$'' + ''^src/libutil/ref\.hh$'' + ''^src/libutil/references\.cc$'' + ''^src/libutil/references\.hh$'' + ''^src/libutil/regex-combinators\.hh$'' + ''^src/libutil/serialise\.cc$'' + ''^src/libutil/serialise\.hh$'' + ''^src/libutil/signals\.hh$'' + ''^src/libutil/signature/local-keys\.cc$'' + ''^src/libutil/signature/local-keys\.hh$'' + ''^src/libutil/signature/signer\.cc$'' + ''^src/libutil/signature/signer\.hh$'' + ''^src/libutil/source-accessor\.cc$'' + ''^src/libutil/source-accessor\.hh$'' + ''^src/libutil/source-path\.cc$'' + ''^src/libutil/source-path\.hh$'' + ''^src/libutil/split\.hh$'' + ''^src/libutil/suggestions\.cc$'' + ''^src/libutil/suggestions\.hh$'' + ''^src/libutil/sync\.hh$'' + ''^src/libutil/terminal\.cc$'' + ''^src/libutil/terminal\.hh$'' + ''^src/libutil/thread-pool\.cc$'' + ''^src/libutil/thread-pool\.hh$'' + ''^src/libutil/topo-sort\.hh$'' + ''^src/libutil/types\.hh$'' + ''^src/libutil/unix/file-descriptor\.cc$'' + ''^src/libutil/unix/file-path\.cc$'' + ''^src/libutil/unix/monitor-fd\.hh$'' + ''^src/libutil/unix/processes\.cc$'' + ''^src/libutil/unix/signals-impl\.hh$'' + ''^src/libutil/unix/signals\.cc$'' + ''^src/libutil/unix-domain-socket\.cc$'' + ''^src/libutil/unix/users\.cc$'' + ''^src/libutil/url-parts\.hh$'' + ''^src/libutil/url\.cc$'' + ''^src/libutil/url\.hh$'' + ''^src/libutil/users\.cc$'' + ''^src/libutil/users\.hh$'' + ''^src/libutil/util\.cc$'' + ''^src/libutil/util\.hh$'' + ''^src/libutil/variant-wrapper\.hh$'' + ''^src/libutil/windows/environment-variables\.cc$'' + ''^src/libutil/windows/file-descriptor\.cc$'' + ''^src/libutil/windows/file-path\.cc$'' + ''^src/libutil/windows/processes\.cc$'' + ''^src/libutil/windows/users\.cc$'' + ''^src/libutil/windows/windows-error\.cc$'' + ''^src/libutil/windows/windows-error\.hh$'' + ''^src/libutil/xml-writer\.cc$'' + ''^src/libutil/xml-writer\.hh$'' + ''^src/nix-build/nix-build\.cc$'' + ''^src/nix-channel/nix-channel\.cc$'' + ''^src/nix-collect-garbage/nix-collect-garbage\.cc$'' + ''^src/nix-env/buildenv.nix$'' + 
''^src/nix-env/nix-env\.cc$'' + ''^src/nix-env/user-env\.cc$'' + ''^src/nix-env/user-env\.hh$'' + ''^src/nix-instantiate/nix-instantiate\.cc$'' + ''^src/nix-store/dotgraph\.cc$'' + ''^src/nix-store/graphml\.cc$'' + ''^src/nix-store/nix-store\.cc$'' + ''^src/nix/add-to-store\.cc$'' + ''^src/nix/app\.cc$'' + ''^src/nix/build\.cc$'' + ''^src/nix/bundle\.cc$'' + ''^src/nix/cat\.cc$'' + ''^src/nix/config-check\.cc$'' + ''^src/nix/config\.cc$'' + ''^src/nix/copy\.cc$'' + ''^src/nix/derivation-add\.cc$'' + ''^src/nix/derivation-show\.cc$'' + ''^src/nix/derivation\.cc$'' + ''^src/nix/develop\.cc$'' + ''^src/nix/diff-closures\.cc$'' + ''^src/nix/dump-path\.cc$'' + ''^src/nix/edit\.cc$'' + ''^src/nix/eval\.cc$'' + ''^src/nix/flake\.cc$'' + ''^src/nix/fmt\.cc$'' + ''^src/nix/hash\.cc$'' + ''^src/nix/log\.cc$'' + ''^src/nix/ls\.cc$'' + ''^src/nix/main\.cc$'' + ''^src/nix/make-content-addressed\.cc$'' + ''^src/nix/nar\.cc$'' + ''^src/nix/optimise-store\.cc$'' + ''^src/nix/path-from-hash-part\.cc$'' + ''^src/nix/path-info\.cc$'' + ''^src/nix/prefetch\.cc$'' + ''^src/nix/profile\.cc$'' + ''^src/nix/realisation\.cc$'' + ''^src/nix/registry\.cc$'' + ''^src/nix/repl\.cc$'' + ''^src/nix/run\.cc$'' + ''^src/nix/run\.hh$'' + ''^src/nix/search\.cc$'' + ''^src/nix/sigs\.cc$'' + ''^src/nix/store-copy-log\.cc$'' + ''^src/nix/store-delete\.cc$'' + ''^src/nix/store-gc\.cc$'' + ''^src/nix/store-info\.cc$'' + ''^src/nix/store-repair\.cc$'' + ''^src/nix/store\.cc$'' + ''^src/nix/unix/daemon\.cc$'' + ''^src/nix/upgrade-nix\.cc$'' + ''^src/nix/verify\.cc$'' + ''^src/nix/why-depends\.cc$'' + ]; + }; + + }; + + # We'll be pulling from this in the main flake + flake.getSystem = getSystem; +} diff --git a/maintainers/local.mk b/maintainers/local.mk new file mode 100644 index 000000000000..88d594d67d87 --- /dev/null +++ b/maintainers/local.mk @@ -0,0 +1,15 @@ + +.PHONY: format +print-top-help += echo ' format: Format source code' + +# This uses the cached .pre-commit-hooks.yaml file +format: + @if ! type -p pre-commit &>/dev/null; then \ + echo "make format: pre-commit not found. Please use \`nix develop\`."; \ + exit 1; \ + fi; \ + if test -z "$$_NIX_PRE_COMMIT_HOOKS_CONFIG"; then \ + echo "make format: _NIX_PRE_COMMIT_HOOKS_CONFIG not set. 
Please use \`nix develop\`."; \ + exit 1; \ + fi; \ + pre-commit run --config $$_NIX_PRE_COMMIT_HOOKS_CONFIG --all-files diff --git a/maintainers/upload-release.pl b/maintainers/upload-release.pl index 9a30f8227ee7..9e73524a632d 100755 --- a/maintainers/upload-release.pl +++ b/maintainers/upload-release.pl @@ -189,10 +189,7 @@ sub downloadFile { eval { downloadFile("dockerImage.$system", "1", $fn); }; - if ($@) { - warn "$@" if $@; - next; - } + die "$@" if $@; $haveDocker = 1; print STDERR "loading docker image for $dockerPlatform...\n"; diff --git a/mk/common-test.sh b/mk/common-test.sh index 2783d293bc42..2abea7887d1b 100644 --- a/mk/common-test.sh +++ b/mk/common-test.sh @@ -17,11 +17,3 @@ TESTS_ENVIRONMENT=( run () { cd "$(dirname $1)" && env "${TESTS_ENVIRONMENT[@]}" $BASH -x -e -u -o pipefail $(basename $1) } - -init_test () { - run "$init" 2>/dev/null > /dev/null -} - -run_test_proper () { - run "$test" -} diff --git a/mk/debug-test.sh b/mk/debug-test.sh index 52482c01e47c..0dd4406c38e1 100755 --- a/mk/debug-test.sh +++ b/mk/debug-test.sh @@ -3,12 +3,8 @@ set -eu -o pipefail test=$1 -init=${2-} dir="$(dirname "${BASH_SOURCE[0]}")" source "$dir/common-test.sh" -if [ -n "$init" ]; then - (init_test) -fi -run_test_proper +run "$test" diff --git a/mk/lib.mk b/mk/lib.mk index a002d823fe33..1e7af6ad556c 100644 --- a/mk/lib.mk +++ b/mk/lib.mk @@ -87,15 +87,14 @@ $(foreach script, $(bin-scripts), $(eval $(call install-program-in,$(script),$(b $(foreach script, $(bin-scripts), $(eval programs-list += $(script))) $(foreach script, $(noinst-scripts), $(eval programs-list += $(script))) $(foreach template, $(template-files), $(eval $(call instantiate-template,$(template)))) -install_test_init=tests/functional/init.sh $(foreach test, $(install-tests), \ - $(eval $(call run-test,$(test),$(install_test_init))) \ + $(eval $(call run-test,$(test))) \ $(eval installcheck: $(test).test)) $(foreach test-group, $(install-tests-groups), \ - $(eval $(call run-test-group,$(test-group),$(install_test_init))) \ + $(eval $(call run-test-group,$(test-group))) \ $(eval installcheck: $(test-group).test-group) \ $(foreach test, $($(test-group)-tests), \ - $(eval $(call run-test,$(test),$(install_test_init))) \ + $(eval $(call run-test,$(test))) \ $(eval $(test-group).test-group: $(test).test))) # Compilation database. diff --git a/mk/run-test.sh b/mk/run-test.sh index da9c5a473b40..1256bfcf748b 100755 --- a/mk/run-test.sh +++ b/mk/run-test.sh @@ -8,7 +8,6 @@ yellow="" normal="" test=$1 -init=${2-} dir="$(dirname "${BASH_SOURCE[0]}")" source "$dir/common-test.sh" @@ -22,10 +21,7 @@ if [ -t 1 ]; then fi run_test () { - if [ -n "$init" ]; then - (init_test 2>/dev/null > /dev/null) - fi - log="$(run_test_proper 2>&1)" && status=0 || status=$? + log="$(run "$test" 2>&1)" && status=0 || status=$? } run_test diff --git a/mk/tests.mk b/mk/tests.mk index bac9b704ad10..0a10f6d3bfd8 100644 --- a/mk/tests.mk +++ b/mk/tests.mk @@ -12,8 +12,8 @@ endef define run-test - $(eval $(call run-bash,$1.test,$1 $(test-deps),mk/run-test.sh $1 $2)) - $(eval $(call run-bash,$1.test-debug,$1 $(test-deps),mk/debug-test.sh $1 $2)) + $(eval $(call run-bash,$1.test,$1 $(test-deps),mk/run-test.sh $1)) + $(eval $(call run-bash,$1.test-debug,$1 $(test-deps),mk/debug-test.sh $1)) endef diff --git a/package.nix b/package.nix index 1e5b9e449b35..cf1654c6a23b 100644 --- a/package.nix +++ b/package.nix @@ -1,4 +1,5 @@ { lib +, fetchurl , stdenv , releaseTools , autoconf-archive @@ -167,6 +168,8 @@ in { ./m4 # TODO: do we really need README.md? 
It doesn't seem used in the build. ./README.md + # This could be put behind a conditional + ./maintainers/local.mk # For make, regardless of what we are building ./local.mk ./Makefile diff --git a/perl/.yath.rc b/perl/.yath.rc deleted file mode 100644 index 118bf80c8210..000000000000 --- a/perl/.yath.rc +++ /dev/null @@ -1,2 +0,0 @@ -[test] --I=rel(lib/Nix) diff --git a/perl/.yath.rc.in b/perl/.yath.rc.in new file mode 100644 index 000000000000..e6f5f93ecdd1 --- /dev/null +++ b/perl/.yath.rc.in @@ -0,0 +1,2 @@ +[test] +-I=rel(@lib_dir@) diff --git a/perl/Makefile b/perl/Makefile deleted file mode 100644 index 832668dd1559..000000000000 --- a/perl/Makefile +++ /dev/null @@ -1,21 +0,0 @@ -makefiles = local.mk - -GLOBAL_CXXFLAGS += -g -Wall -std=c++2a - -# A convenience for concurrent development of Nix and its Perl bindings. -# Not needed in a standalone build of the Perl bindings. -ifneq ("$(wildcard ../src)", "") - GLOBAL_CXXFLAGS += -I ../src -endif - --include Makefile.config - -OPTIMIZE = 1 - -ifeq ($(OPTIMIZE), 1) - GLOBAL_CXXFLAGS += -O3 -else - GLOBAL_CXXFLAGS += -O0 -endif - -include mk/lib.mk diff --git a/perl/Makefile.config.in b/perl/Makefile.config.in deleted file mode 100644 index d856de3ada86..000000000000 --- a/perl/Makefile.config.in +++ /dev/null @@ -1,18 +0,0 @@ -HOST_OS = @host_os@ -CC = @CC@ -CFLAGS = @CFLAGS@ -CXX = @CXX@ -CXXFLAGS = @CXXFLAGS@ -PACKAGE_NAME = @PACKAGE_NAME@ -PACKAGE_VERSION = @PACKAGE_VERSION@ -SODIUM_LIBS = @SODIUM_LIBS@ -NIX_CFLAGS = @NIX_CFLAGS@ -NIX_LIBS = @NIX_LIBS@ -nixbindir = @nixbindir@ -curl = @curl@ -nixlibexecdir = @nixlibexecdir@ -nixlocalstatedir = @nixlocalstatedir@ -perl = @perl@ -perllibdir = @perllibdir@ -nixstoredir = @nixstoredir@ -nixsysconfdir = @nixsysconfdir@ diff --git a/perl/configure.ac b/perl/configure.ac deleted file mode 100644 index a02cb06c9ed6..000000000000 --- a/perl/configure.ac +++ /dev/null @@ -1,84 +0,0 @@ -AC_INIT(nix-perl, m4_esyscmd([bash -c "echo -n $(cat ../.version)$VERSION_SUFFIX"])) -AC_CONFIG_SRCDIR(MANIFEST) -AC_CONFIG_AUX_DIR(../config) - -CFLAGS= -CXXFLAGS= -AC_PROG_CC -AC_PROG_CXX - -AC_CANONICAL_HOST - -# Use 64-bit file system calls so that we can support files > 2 GiB. -AC_SYS_LARGEFILE - -AC_DEFUN([NEED_PROG], -[ -AC_PATH_PROG($1, $2) -if test -z "$$1"; then - AC_MSG_ERROR([$2 is required]) -fi -]) - -NEED_PROG(perl, perl) -NEED_PROG(curl, curl) -NEED_PROG(bzip2, bzip2) -NEED_PROG(xz, xz) - -# Test that Perl has the open/fork feature (Perl 5.8.0 and beyond). -AC_MSG_CHECKING([whether Perl is recent enough]) -if ! $perl -e 'open(FOO, "-|", "true"); while () { print; }; close FOO or die;'; then - AC_MSG_RESULT(no) - AC_MSG_ERROR([Your Perl version is too old. Nix requires Perl 5.8.0 or newer.]) -fi -AC_MSG_RESULT(yes) - - -# Figure out where to install Perl modules. -AC_MSG_CHECKING([for the Perl installation prefix]) -perlversion=$($perl -e 'use Config; print $Config{version};') -perlarchname=$($perl -e 'use Config; print $Config{archname};') -AC_SUBST(perllibdir, [${libdir}/perl5/site_perl/$perlversion/$perlarchname]) -AC_MSG_RESULT($perllibdir) - -# Look for libsodium. -PKG_CHECK_MODULES([SODIUM], [libsodium], [CXXFLAGS="$SODIUM_CFLAGS $CXXFLAGS"]) - -# Check for the required Perl dependencies (DBI and DBD::SQLite). 
-perlFlags="-I$perllibdir" - -AC_ARG_WITH(dbi, AC_HELP_STRING([--with-dbi=PATH], - [prefix of the Perl DBI library]), - perlFlags="$perlFlags -I$withval") - -AC_ARG_WITH(dbd-sqlite, AC_HELP_STRING([--with-dbd-sqlite=PATH], - [prefix of the Perl DBD::SQLite library]), - perlFlags="$perlFlags -I$withval") - -AC_MSG_CHECKING([whether DBD::SQLite works]) -if ! $perl $perlFlags -e 'use DBI; use DBD::SQLite;' 2>&5; then - AC_MSG_RESULT(no) - AC_MSG_FAILURE([The Perl modules DBI and/or DBD::SQLite are missing.]) -fi -AC_MSG_RESULT(yes) - -AC_SUBST(perlFlags) - -PKG_CHECK_MODULES([NIX], [nix-store]) - -NEED_PROG([NIX], [nix]) - -# Expand all variables in config.status. -test "$prefix" = NONE && prefix=$ac_default_prefix -test "$exec_prefix" = NONE && exec_prefix='${prefix}' -for name in $ac_subst_vars; do - declare $name="$(eval echo "${!name}")" - declare $name="$(eval echo "${!name}")" - declare $name="$(eval echo "${!name}")" -done - -rm -f Makefile.config -ln -sfn ../mk mk - -AC_CONFIG_FILES([]) -AC_OUTPUT diff --git a/perl/default.nix b/perl/default.nix index 7103574c9a93..45682381ea56 100644 --- a/perl/default.nix +++ b/perl/default.nix @@ -1,47 +1,52 @@ -{ lib, fileset +{ lib +, fileset , stdenv -, perl, perlPackages -, autoconf-archive, autoreconfHook, pkg-config -, nix, curl, bzip2, xz, boost, libsodium, darwin +, perl +, perlPackages +, meson +, ninja +, pkg-config +, nix +, curl +, bzip2 +, xz +, boost +, libsodium +, darwin }: perl.pkgs.toPerlModule (stdenv.mkDerivation (finalAttrs: { name = "nix-perl-${nix.version}"; src = fileset.toSource { - root = ../.; + root = ./.; fileset = fileset.unions ([ - ../.version - ../m4 - ../mk ./MANIFEST - ./Makefile - ./Makefile.config.in - ./configure.ac ./lib - ./local.mk + ./meson.build + ./meson_options.txt ] ++ lib.optionals finalAttrs.doCheck [ - ./.yath.rc + ./.yath.rc.in ./t ]); }; - nativeBuildInputs = - [ autoconf-archive - autoreconfHook - pkg-config - ]; - - buildInputs = - [ nix - curl - bzip2 - xz - perl - boost - ] - ++ lib.optional (stdenv.isLinux || stdenv.isDarwin) libsodium - ++ lib.optional stdenv.isDarwin darwin.apple_sdk.frameworks.Security; + nativeBuildInputs = [ + meson + ninja + pkg-config + ]; + + buildInputs = [ + nix + curl + bzip2 + xz + perl + boost + ] + ++ lib.optional (stdenv.isLinux || stdenv.isDarwin) libsodium + ++ lib.optional stdenv.isDarwin darwin.apple_sdk.frameworks.Security; # `perlPackages.Test2Harness` is marked broken for Darwin doCheck = !stdenv.isDarwin; @@ -50,12 +55,16 @@ perl.pkgs.toPerlModule (stdenv.mkDerivation (finalAttrs: { perlPackages.Test2Harness ]; - configureFlags = [ - "--with-dbi=${perlPackages.DBI}/${perl.libPrefix}" - "--with-dbd-sqlite=${perlPackages.DBDSQLite}/${perl.libPrefix}" + mesonFlags = [ + (lib.mesonOption "version" (builtins.readFile ../.version)) + (lib.mesonOption "dbi_path" "${perlPackages.DBI}/${perl.libPrefix}") + (lib.mesonOption "dbd_sqlite_path" "${perlPackages.DBDSQLite}/${perl.libPrefix}") + (lib.mesonEnable "tests" finalAttrs.doCheck) ]; - enableParallelBuilding = true; + mesonCheckFlags = [ + "--print-errorlogs" + ]; - postUnpack = "sourceRoot=$sourceRoot/perl"; + enableParallelBuilding = true; })) diff --git a/perl/lib/Nix/Store.xs b/perl/lib/Nix/Store.xs index 1c64cc66b14c..ee211ef64933 100644 --- a/perl/lib/Nix/Store.xs +++ b/perl/lib/Nix/Store.xs @@ -1,4 +1,4 @@ -#include "config.h" +#include "nix/config.h" #include "EXTERN.h" #include "perl.h" @@ -256,9 +256,8 @@ SV * hashPath(char * algo, int base32, char * path) PPCODE: try { - auto [accessor, canonPath] = 
PosixSourceAccessor::createAtRoot(path); Hash h = hashPath( - accessor, canonPath, + PosixSourceAccessor::createAtRoot(path), FileIngestionMethod::Recursive, parseHashAlgo(algo)); auto s = h.to_string(base32 ? HashFormat::Nix32 : HashFormat::Base16, false); XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0))); @@ -336,10 +335,9 @@ StoreWrapper::addToStore(char * srcPath, int recursive, char * algo) PPCODE: try { auto method = recursive ? FileIngestionMethod::Recursive : FileIngestionMethod::Flat; - auto [accessor, canonPath] = PosixSourceAccessor::createAtRoot(srcPath); auto path = THIS->store->addToStore( std::string(baseNameOf(srcPath)), - accessor, canonPath, + PosixSourceAccessor::createAtRoot(srcPath), method, parseHashAlgo(algo)); XPUSHs(sv_2mortal(newSVpv(THIS->store->printStorePath(path).c_str(), 0))); } catch (Error & e) { diff --git a/perl/lib/Nix/meson.build b/perl/lib/Nix/meson.build new file mode 100644 index 000000000000..9a79245cd5e8 --- /dev/null +++ b/perl/lib/Nix/meson.build @@ -0,0 +1,59 @@ +# Nix-Perl Scripts +#============================================================================ + + + +# Sources +#------------------------------------------------- + +nix_perl_store_xs = files('Store.xs') + +nix_perl_scripts = files( + 'CopyClosure.pm', + 'Manifest.pm', + 'SSH.pm', + 'Store.pm', + 'Utils.pm', +) + +foreach f : nix_perl_scripts + fs.copyfile(f) +endforeach + + +# Targets +#--------------------------------------------------- + +nix_perl_scripts += configure_file( + output : 'Config.pm', + input : 'Config.pm.in', + configuration : nix_perl_conf, +) + +nix_perl_store_cc = custom_target( + 'Store.cc', + output : 'Store.cc', + input : nix_perl_store_xs, + command : [xsubpp, '@INPUT@', '-output', '@OUTPUT@'], +) + +# Build Nix::Store Library +#------------------------------------------------- +nix_perl_store_lib = library( + 'Store', + sources : nix_perl_store_cc, + name_prefix : '', + install : true, + install_mode : 'rwxr-xr-x', + install_dir : join_paths(nix_perl_install_dir, 'auto', 'Nix', 'Store'), + dependencies : nix_perl_store_dep_list, +) + + +# Install Scripts +#--------------------------------------------------- +install_data( + nix_perl_scripts, + install_mode : 'rw-r--r--', + install_dir : join_paths(nix_perl_install_dir,'Nix'), +) diff --git a/perl/local.mk b/perl/local.mk deleted file mode 100644 index ed4764eb96bc..000000000000 --- a/perl/local.mk +++ /dev/null @@ -1,46 +0,0 @@ -nix_perl_sources := \ - lib/Nix/Store.pm \ - lib/Nix/Manifest.pm \ - lib/Nix/SSH.pm \ - lib/Nix/CopyClosure.pm \ - lib/Nix/Config.pm.in \ - lib/Nix/Utils.pm - -nix_perl_modules := $(nix_perl_sources:.in=) - -$(foreach x, $(nix_perl_modules), $(eval $(call install-data-in, $(x), $(perllibdir)/Nix))) - -lib/Nix/Store.cc: lib/Nix/Store.xs - $(trace-gen) xsubpp $^ -output $@ - -libraries += Store - -Store_DIR := lib/Nix - -Store_SOURCES := $(Store_DIR)/Store.cc - -Store_CXXFLAGS = \ - $(NIX_CFLAGS) \ - -I$(shell perl -e 'use Config; print $$Config{archlibexp};')/CORE \ - -D_FILE_OFFSET_BITS=64 \ - -Wno-unknown-warning-option -Wno-unused-variable -Wno-literal-suffix \ - -Wno-reserved-user-defined-literal -Wno-duplicate-decl-specifier -Wno-pointer-bool-conversion - -Store_LDFLAGS := $(SODIUM_LIBS) $(NIX_LIBS) - -ifdef HOST_CYGWIN - archlib = $(shell perl -E 'use Config; print $$Config{archlib};') - libperl = $(shell perl -E 'use Config; print $$Config{libperl};') - Store_LDFLAGS += $(shell find ${archlib} -name ${libperl}) -endif - -Store_ALLOW_UNDEFINED = 1 - -Store_FORCE_INSTALL = 1 - 
-Store_INSTALL_DIR = $(perllibdir)/auto/Nix/Store - -clean-files += lib/Nix/Config.pm lib/Nix/Store.cc Makefile.config - -check: all - yath test diff --git a/perl/meson.build b/perl/meson.build new file mode 100644 index 000000000000..350e5bd67b78 --- /dev/null +++ b/perl/meson.build @@ -0,0 +1,160 @@ +# Nix-Perl Meson build +#============================================================================ + + +# init project +#============================================================================ +project ( + 'nix-perl', + 'cpp', + meson_version : '>= 0.64.0', + license : 'LGPL-2.1-or-later', +) + +# setup env +#------------------------------------------------- +fs = import('fs') +nix_version = get_option('version') +cpp = meson.get_compiler('cpp') +nix_perl_conf = configuration_data() +nix_perl_conf.set('PACKAGE_VERSION', nix_version) + + +# set error arguments +#------------------------------------------------- +error_args = [ + '-Wno-pedantic', + '-Wno-non-virtual-dtor', + '-Wno-unused-parameter', + '-Wno-variadic-macros', + '-Wdeprecated-declarations', + '-Wno-missing-field-initializers', + '-Wno-unknown-warning-option', + '-Wno-unused-variable', + '-Wno-literal-suffix', + '-Wno-reserved-user-defined-literal', + '-Wno-duplicate-decl-specifier', + '-Wno-pointer-bool-conversion', +] + +add_project_arguments( + cpp.get_supported_arguments(error_args), + language : 'cpp', +) + + +# set install directories +#------------------------------------------------- +prefix = get_option('prefix') +libdir = join_paths(prefix, get_option('libdir')) + +# Dependencies +#============================================================================ + +# Required Programs +#------------------------------------------------- +xz = find_program('xz') +xsubpp = find_program('xsubpp') +perl = find_program('perl') +curl = find_program('curl') +yath = find_program('yath', required : false) + +# Required Libraries +#------------------------------------------------- +bzip2_dep = dependency('bzip2') +curl_dep = dependency('libcurl') +libsodium_dep = dependency('libsodium') +# nix_util_dep = dependency('nix-util') +nix_store_dep = dependency('nix-store') + + +# Finding Perl Headers is a pain. as they do not have +# pkgconfig available, are not in a standard location, +# and are installed into a version folder. Use the +# Perl binary to give hints about perl include dir. 
+#------------------------------------------------- +perl_archname = run_command( + perl, '-e', 'use Config; print $Config{archname};', check: true).stdout() +perl_version = run_command( + perl, '-e', 'use Config; print $Config{version};', check: true).stdout() +perl_archlibexp = run_command( + perl, '-e', 'use Config; print $Config{archlibexp};', check: true).stdout() +perl_site_libdir = run_command( + perl, '-e', 'use Config; print $Config{installsitearch};', check: true).stdout() +nix_perl_install_dir = join_paths( + libdir, 'perl5', 'site_perl', perl_version, perl_archname) + + +# print perl hints for logs +#------------------------------------------------- +message('Perl archname: @0@'.format(perl_archname)) +message('Perl version: @0@'.format(perl_version)) +message('Perl archlibexp: @0@'.format(perl_archlibexp)) +message('Perl install site: @0@'.format(perl_site_libdir)) +message('Assumed Nix-Perl install dir: @0@'.format(nix_perl_install_dir)) + +# Now find perl modules +#------------------------------------------------- +perl_check_dbi = run_command( + perl, + '-e', 'use DBI; use DBD::SQLite;', + '-I@0@'.format(get_option('dbi_path')), + '-I@0@'.format(get_option('dbd_sqlite_path')), + check: true +) + +if perl_check_dbi.returncode() == 2 + error('The Perl modules DBI and/or DBD::SQLite are missing.') +else + message('Found Perl Modules: DBI, DBD::SQLite.') +endif + + + +# declare perl dependency +#------------------------------------------------- +perl_dep = declare_dependency( + dependencies : cpp.find_library( + 'perl', + has_headers : [ + join_paths(perl_archlibexp, 'CORE', 'perl.h'), + join_paths(perl_archlibexp, 'CORE', 'EXTERN.h')], + dirs : [ + join_paths(perl_archlibexp, 'CORE'), + ], + ), + include_directories : join_paths(perl_archlibexp, 'CORE'), +) + +# declare dependencies +#------------------------------------------------- +nix_perl_store_dep_list = [ + perl_dep, + bzip2_dep, + curl_dep, + libsodium_dep, + nix_store_dep, +] + +# # build +# #------------------------------------------------- +lib_dir = join_paths('lib', 'Nix') +subdir(lib_dir) + +if get_option('tests').enabled() + yath_rc_conf = configuration_data() + yath_rc_conf.set('lib_dir', lib_dir) + yath_rc = configure_file( + output : '.yath.rc', + input : '.yath.rc.in', + configuration : yath_rc_conf, + ) + subdir('t') + test( + 'nix-perl-test', + yath, + args : ['test'], + workdir : meson.current_build_dir(), + depends : [nix_perl_store_lib], + ) +endif diff --git a/perl/meson_options.txt b/perl/meson_options.txt new file mode 100644 index 000000000000..82ca52f37195 --- /dev/null +++ b/perl/meson_options.txt @@ -0,0 +1,32 @@ +# Nix-Perl build options +#============================================================================ + + +# compiler args +#============================================================================ + +option( + 'version', + type : 'string', + description : 'nix-perl version') + +option( + 'tests', + type : 'feature', + value : 'disabled', + description : 'run nix-perl tests') + + +# Location of Perl Modules +#============================================================================ +option( + 'dbi_path', + type : 'string', + value : '/usr', + description : 'path to perl::dbi') + +option( + 'dbd_sqlite_path', + type : 'string', + value : '/usr', + description : 'path to perl::dbd-SQLite') diff --git a/perl/t/meson.build b/perl/t/meson.build new file mode 100644 index 000000000000..dbd1139f327d --- /dev/null +++ b/perl/t/meson.build @@ -0,0 +1,15 @@ +# Nix-Perl Tests 
+#============================================================================ + + +# src +#--------------------------------------------------- + +nix_perl_tests = files( + 'init.t', +) + + +foreach f : nix_perl_tests + fs.copyfile(f) +endforeach diff --git a/scripts/install-systemd-multi-user.sh b/scripts/install-systemd-multi-user.sh index 202a9bb548b7..a62ed7e3aa44 100755 --- a/scripts/install-systemd-multi-user.sh +++ b/scripts/install-systemd-multi-user.sh @@ -35,7 +35,7 @@ escape_systemd_env() { # Gather all non-empty proxy environment variables into a string create_systemd_proxy_env() { - vars="http_proxy https_proxy ftp_proxy no_proxy HTTP_PROXY HTTPS_PROXY FTP_PROXY NO_PROXY" + vars="http_proxy https_proxy ftp_proxy all_proxy no_proxy HTTP_PROXY HTTPS_PROXY FTP_PROXY ALL_PROXY NO_PROXY" for v in $vars; do if [ "x${!v:-}" != "x" ]; then echo "Environment=${v}=$(escape_systemd_env ${!v})" diff --git a/src/build-remote/build-remote.cc b/src/build-remote/build-remote.cc index 2a4723643f8f..18eee830b9e0 100644 --- a/src/build-remote/build-remote.cc +++ b/src/build-remote/build-remote.cc @@ -22,7 +22,6 @@ #include "experimental-features.hh" using namespace nix; -using namespace nix::unix; using std::cin; static void handleAlarm(int sig) { diff --git a/src/libcmd/command.cc b/src/libcmd/command.cc index 220a90cf686f..543250da3ac0 100644 --- a/src/libcmd/command.cc +++ b/src/libcmd/command.cc @@ -128,10 +128,10 @@ ref EvalCommand::getEvalState() evalState = #if HAVE_BOEHMGC std::allocate_shared(traceable_allocator(), - searchPath, getEvalStore(), getStore()) + lookupPath, getEvalStore(), getStore()) #else std::make_shared( - searchPath, getEvalStore(), getStore()) + lookupPath, getEvalStore(), getStore()) #endif ; diff --git a/src/libcmd/common-eval-args.cc b/src/libcmd/common-eval-args.cc index c6ee0d0b2593..155b43b700b2 100644 --- a/src/libcmd/common-eval-args.cc +++ b/src/libcmd/common-eval-args.cc @@ -125,7 +125,7 @@ MixEvalArgs::MixEvalArgs() .category = category, .labels = {"path"}, .handler = {[&](std::string s) { - searchPath.elements.emplace_back(SearchPath::Elem::parse(s)); + lookupPath.elements.emplace_back(LookupPath::Elem::parse(s)); }} }); diff --git a/src/libcmd/common-eval-args.hh b/src/libcmd/common-eval-args.hh index 25ce5b9dada1..75cb19334fe8 100644 --- a/src/libcmd/common-eval-args.hh +++ b/src/libcmd/common-eval-args.hh @@ -23,7 +23,7 @@ struct MixEvalArgs : virtual Args, virtual MixRepair Bindings * getAutoArgs(EvalState & state); - SearchPath searchPath; + LookupPath lookupPath; std::optional evalStoreUrl; @@ -38,6 +38,9 @@ private: std::map autoArgs; }; +/** + * @param baseDir Optional [base directory](https://nixos.org/manual/nix/unstable/glossary#gloss-base-directory) + */ SourcePath lookupFileArg(EvalState & state, std::string_view s, const Path * baseDir = nullptr); } diff --git a/src/libcmd/installables.cc b/src/libcmd/installables.cc index ed67723778f4..43e312540ee4 100644 --- a/src/libcmd/installables.cc +++ b/src/libcmd/installables.cc @@ -21,7 +21,6 @@ #include "url.hh" #include "registry.hh" #include "build-result.hh" -#include "fs-input-accessor.hh" #include #include @@ -147,7 +146,7 @@ MixFlakeOptions::MixFlakeOptions() .category = category, .labels = {"flake-lock-path"}, .handler = {[&](std::string lockFilePath) { - lockFlags.referenceLockFilePath = getUnfilteredRootPath(CanonPath(absPath(lockFilePath))); + lockFlags.referenceLockFilePath = {getFSSourceAccessor(), CanonPath(absPath(lockFilePath))}; }}, .completer = completePath }); @@ -443,13 +442,10 
@@ ref openEvalCache( EvalState & state, std::shared_ptr lockedFlake) { - auto fingerprint = lockedFlake->getFingerprint(state.store); - return make_ref( - evalSettings.useEvalCache && evalSettings.pureEval - ? fingerprint - : std::nullopt, - state, - [&state, lockedFlake]() + auto fingerprint = evalSettings.useEvalCache && evalSettings.pureEval + ? lockedFlake->getFingerprint(state.store) + : std::nullopt; + auto rootLoader = [&state, lockedFlake]() { /* For testing whether the evaluation cache is complete. */ @@ -465,7 +461,17 @@ ref openEvalCache( assert(aOutputs); return aOutputs->value; - }); + }; + + if (fingerprint) { + auto search = state.evalCaches.find(fingerprint.value()); + if (search == state.evalCaches.end()) { + search = state.evalCaches.emplace(fingerprint.value(), make_ref(fingerprint, state, rootLoader)).first; + } + return search->second; + } else { + return make_ref(std::nullopt, state, rootLoader); + } } Installables SourceExprCommand::parseInstallables( diff --git a/src/libcmd/network-proxy.cc b/src/libcmd/network-proxy.cc new file mode 100644 index 000000000000..633b2c005c15 --- /dev/null +++ b/src/libcmd/network-proxy.cc @@ -0,0 +1,45 @@ +#include "network-proxy.hh" + +#include +#include + +#include "environment-variables.hh" + +namespace nix { + +static const StringSet lowercaseVariables{"http_proxy", "https_proxy", "ftp_proxy", "all_proxy", "no_proxy"}; + +static StringSet getAllVariables() +{ + StringSet variables = lowercaseVariables; + for (const auto & variable : lowercaseVariables) { + variables.insert(boost::to_upper_copy(variable)); + } + return variables; +} + +const StringSet networkProxyVariables = getAllVariables(); + +static StringSet getExcludingNoProxyVariables() +{ + static const StringSet excludeVariables{"no_proxy", "NO_PROXY"}; + StringSet variables; + std::set_difference( + networkProxyVariables.begin(), networkProxyVariables.end(), excludeVariables.begin(), excludeVariables.end(), + std::inserter(variables, variables.begin())); + return variables; +} + +static const StringSet excludingNoProxyVariables = getExcludingNoProxyVariables(); + +bool haveNetworkProxyConnection() +{ + for (const auto & variable : excludingNoProxyVariables) { + if (getEnv(variable).has_value()) { + return true; + } + } + return false; +} + +} diff --git a/src/libcmd/network-proxy.hh b/src/libcmd/network-proxy.hh new file mode 100644 index 000000000000..0b6856acbf43 --- /dev/null +++ b/src/libcmd/network-proxy.hh @@ -0,0 +1,22 @@ +#pragma once +///@file + +#include "types.hh" + +namespace nix { + +/** + * Environment variables relating to network proxying. These are used by + * a few misc commands. + * + * See the Environment section of https://curl.se/docs/manpage.html for details. + */ +extern const StringSet networkProxyVariables; + +/** + * Heuristically check if there is a proxy connection by checking for defined + * proxy variables. 
+ */ +bool haveNetworkProxyConnection(); + +} diff --git a/src/libcmd/repl.cc b/src/libcmd/repl.cc index ffbb43a698cd..a069dd52cb88 100644 --- a/src/libcmd/repl.cc +++ b/src/libcmd/repl.cc @@ -77,7 +77,7 @@ struct NixRepl std::unique_ptr interacter; - NixRepl(const SearchPath & searchPath, nix::ref store,ref state, + NixRepl(const LookupPath & lookupPath, nix::ref store,ref state, std::function getValues); virtual ~NixRepl() = default; @@ -122,7 +122,7 @@ std::string removeWhitespace(std::string s) } -NixRepl::NixRepl(const SearchPath & searchPath, nix::ref store, ref state, +NixRepl::NixRepl(const LookupPath & lookupPath, nix::ref store, ref state, std::function getValues) : AbstractNixRepl(state) , debugTraceIndex(0) @@ -259,11 +259,13 @@ StringSet NixRepl::completePrefix(const std::string & prefix) try { auto dir = std::string(cur, 0, slash); auto prefix2 = std::string(cur, slash + 1); - for (auto & entry : readDirectory(dir == "" ? "/" : dir)) { - if (entry.name[0] != '.' && hasPrefix(entry.name, prefix2)) - completions.insert(prev + dir + "/" + entry.name); + for (auto & entry : std::filesystem::directory_iterator{dir == "" ? "/" : dir}) { + auto name = entry.path().filename().string(); + if (name[0] != '.' && hasPrefix(name, prefix2)) + completions.insert(prev + entry.path().string()); } } catch (Error &) { + } catch (std::filesystem::filesystem_error &) { } } else if ((dot = cur.rfind('.')) == std::string::npos) { /* This is a variable name; look it up in the current scope. */ @@ -506,9 +508,13 @@ ProcessLineResult NixRepl::processLine(std::string line) auto editor = args.front(); args.pop_front(); + // avoid garbling the editor with the progress bar + logger->pause(); + Finally resume([&]() { logger->resume(); }); + // runProgram redirects stdout to a StringSink, // using runProgram2 to allow editors to display their UI - runProgram2(RunOptions { .program = editor, .searchPath = true, .args = args }); + runProgram2(RunOptions { .program = editor, .lookupPath = true, .args = args }); // Reload right after exiting the editor state->resetFileCache(); @@ -784,11 +790,11 @@ void NixRepl::evalString(std::string s, Value & v) std::unique_ptr AbstractNixRepl::create( - const SearchPath & searchPath, nix::ref store, ref state, + const LookupPath & lookupPath, nix::ref store, ref state, std::function getValues) { return std::make_unique( - searchPath, + lookupPath, openStore(), state, getValues @@ -804,9 +810,9 @@ ReplExitStatus AbstractNixRepl::runSimple( NixRepl::AnnotatedValues values; return values; }; - SearchPath searchPath = {}; + LookupPath lookupPath = {}; auto repl = std::make_unique( - searchPath, + lookupPath, openStore(), evalState, getValues diff --git a/src/libcmd/repl.hh b/src/libcmd/repl.hh index aac79ec7404d..3fd4b2c391ac 100644 --- a/src/libcmd/repl.hh +++ b/src/libcmd/repl.hh @@ -20,7 +20,7 @@ struct AbstractNixRepl typedef std::vector> AnnotatedValues; static std::unique_ptr create( - const SearchPath & searchPath, nix::ref store, ref state, + const LookupPath & lookupPath, nix::ref store, ref state, std::function getValues); static ReplExitStatus runSimple( diff --git a/src/libexpr-c/nix_api_expr.cc b/src/libexpr-c/nix_api_expr.cc index a5c03d5aace1..a29c3425e5f1 100644 --- a/src/libexpr-c/nix_api_expr.cc +++ b/src/libexpr-c/nix_api_expr.cc @@ -85,17 +85,17 @@ nix_err nix_value_force_deep(nix_c_context * context, EvalState * state, Value * NIXC_CATCH_ERRS } -EvalState * nix_state_create(nix_c_context * context, const char ** searchPath_c, Store * store) +EvalState * 
nix_state_create(nix_c_context * context, const char ** lookupPath_c, Store * store) { if (context) context->last_err_code = NIX_OK; try { - nix::Strings searchPath; - if (searchPath_c != nullptr) - for (size_t i = 0; searchPath_c[i] != nullptr; i++) - searchPath.push_back(searchPath_c[i]); + nix::Strings lookupPath; + if (lookupPath_c != nullptr) + for (size_t i = 0; lookupPath_c[i] != nullptr; i++) + lookupPath.push_back(lookupPath_c[i]); - return new EvalState{nix::EvalState(nix::SearchPath::parse(searchPath), store->ptr)}; + return new EvalState{nix::EvalState(nix::LookupPath::parse(lookupPath), store->ptr)}; } NIXC_CATCH_ERRS_NULL } diff --git a/src/libexpr-c/nix_api_expr.h b/src/libexpr-c/nix_api_expr.h index fd9746ab713a..04fc92f0f99d 100644 --- a/src/libexpr-c/nix_api_expr.h +++ b/src/libexpr-c/nix_api_expr.h @@ -140,11 +140,11 @@ nix_err nix_value_force_deep(nix_c_context * context, EvalState * state, Value * * @brief Create a new Nix language evaluator state. * * @param[out] context Optional, stores error information - * @param[in] searchPath Array of strings corresponding to entries in NIX_PATH. + * @param[in] lookupPath Array of strings corresponding to entries in NIX_PATH. * @param[in] store The Nix store to use. * @return A new Nix state or NULL on failure. */ -EvalState * nix_state_create(nix_c_context * context, const char ** searchPath, Store * store); +EvalState * nix_state_create(nix_c_context * context, const char ** lookupPath, Store * store); /** * @brief Frees a Nix state. diff --git a/src/libexpr-c/nix_api_value.cc b/src/libexpr-c/nix_api_value.cc index 2550e975ad70..0366e502008b 100644 --- a/src/libexpr-c/nix_api_value.cc +++ b/src/libexpr-c/nix_api_value.cc @@ -20,7 +20,7 @@ # include "gc_cpp.h" #endif -// Helper function to throw an exception if value is null +// Internal helper functions to check [in] and [out] `Value *` parameters static const nix::Value & check_value_not_null(const Value * value) { if (!value) { @@ -37,6 +37,33 @@ static nix::Value & check_value_not_null(Value * value) return *((nix::Value *) value); } +static const nix::Value & check_value_in(const Value * value) +{ + auto & v = check_value_not_null(value); + if (!v.isValid()) { + throw std::runtime_error("Uninitialized Value"); + } + return v; +} + +static nix::Value & check_value_in(Value * value) +{ + auto & v = check_value_not_null(value); + if (!v.isValid()) { + throw std::runtime_error("Uninitialized Value"); + } + return v; +} + +static nix::Value & check_value_out(Value * value) +{ + auto & v = check_value_not_null(value); + if (v.isValid()) { + throw std::runtime_error("Value already initialized. Variables are immutable"); + } + return v; +} + /** * Helper function to convert calls from nix into C API. 
* @@ -111,7 +138,7 @@ ValueType nix_get_type(nix_c_context * context, const Value * value) if (context) context->last_err_code = NIX_OK; try { - auto & v = check_value_not_null(value); + auto & v = check_value_in(value); using namespace nix; switch (v.type()) { case nThunk: @@ -147,7 +174,7 @@ const char * nix_get_typename(nix_c_context * context, const Value * value) if (context) context->last_err_code = NIX_OK; try { - auto & v = check_value_not_null(value); + auto & v = check_value_in(value); auto s = nix::showType(v); return strdup(s.c_str()); } @@ -159,7 +186,7 @@ bool nix_get_bool(nix_c_context * context, const Value * value) if (context) context->last_err_code = NIX_OK; try { - auto & v = check_value_not_null(value); + auto & v = check_value_in(value); assert(v.type() == nix::nBool); return v.boolean(); } @@ -171,7 +198,7 @@ nix_err nix_get_string(nix_c_context * context, const Value * value, nix_get_str if (context) context->last_err_code = NIX_OK; try { - auto & v = check_value_not_null(value); + auto & v = check_value_in(value); assert(v.type() == nix::nString); call_nix_get_string_callback(v.c_str(), callback, user_data); } @@ -183,7 +210,7 @@ const char * nix_get_path_string(nix_c_context * context, const Value * value) if (context) context->last_err_code = NIX_OK; try { - auto & v = check_value_not_null(value); + auto & v = check_value_in(value); assert(v.type() == nix::nPath); // NOTE (from @yorickvP) // v._path.path should work but may not be how Eelco intended it. @@ -202,7 +229,7 @@ unsigned int nix_get_list_size(nix_c_context * context, const Value * value) if (context) context->last_err_code = NIX_OK; try { - auto & v = check_value_not_null(value); + auto & v = check_value_in(value); assert(v.type() == nix::nList); return v.listSize(); } @@ -214,7 +241,7 @@ unsigned int nix_get_attrs_size(nix_c_context * context, const Value * value) if (context) context->last_err_code = NIX_OK; try { - auto & v = check_value_not_null(value); + auto & v = check_value_in(value); assert(v.type() == nix::nAttrs); return v.attrs()->size(); } @@ -226,7 +253,7 @@ double nix_get_float(nix_c_context * context, const Value * value) if (context) context->last_err_code = NIX_OK; try { - auto & v = check_value_not_null(value); + auto & v = check_value_in(value); assert(v.type() == nix::nFloat); return v.fpoint(); } @@ -238,7 +265,7 @@ int64_t nix_get_int(nix_c_context * context, const Value * value) if (context) context->last_err_code = NIX_OK; try { - auto & v = check_value_not_null(value); + auto & v = check_value_in(value); assert(v.type() == nix::nInt); return v.integer(); } @@ -250,7 +277,7 @@ ExternalValue * nix_get_external(nix_c_context * context, Value * value) if (context) context->last_err_code = NIX_OK; try { - auto & v = check_value_not_null(value); + auto & v = check_value_out(value); assert(v.type() == nix::nExternal); return (ExternalValue *) v.external(); } @@ -262,7 +289,7 @@ Value * nix_get_list_byidx(nix_c_context * context, const Value * value, EvalSta if (context) context->last_err_code = NIX_OK; try { - auto & v = check_value_not_null(value); + auto & v = check_value_in(value); assert(v.type() == nix::nList); auto * p = v.listElems()[ix]; nix_gc_incref(nullptr, p); @@ -278,7 +305,7 @@ Value * nix_get_attr_byname(nix_c_context * context, const Value * value, EvalSt if (context) context->last_err_code = NIX_OK; try { - auto & v = check_value_not_null(value); + auto & v = check_value_in(value); assert(v.type() == nix::nAttrs); nix::Symbol s = state->state.symbols.create(name); 
auto attr = v.attrs()->get(s); @@ -298,7 +325,7 @@ bool nix_has_attr_byname(nix_c_context * context, const Value * value, EvalState if (context) context->last_err_code = NIX_OK; try { - auto & v = check_value_not_null(value); + auto & v = check_value_in(value); assert(v.type() == nix::nAttrs); nix::Symbol s = state->state.symbols.create(name); auto attr = v.attrs()->get(s); @@ -315,7 +342,7 @@ nix_get_attr_byidx(nix_c_context * context, const Value * value, EvalState * sta if (context) context->last_err_code = NIX_OK; try { - auto & v = check_value_not_null(value); + auto & v = check_value_in(value); const nix::Attr & a = (*v.attrs())[i]; *name = ((const std::string &) (state->state.symbols[a.name])).c_str(); nix_gc_incref(nullptr, a.value); @@ -330,7 +357,7 @@ const char * nix_get_attr_name_byidx(nix_c_context * context, const Value * valu if (context) context->last_err_code = NIX_OK; try { - auto & v = check_value_not_null(value); + auto & v = check_value_in(value); const nix::Attr & a = (*v.attrs())[i]; return ((const std::string &) (state->state.symbols[a.name])).c_str(); } @@ -342,7 +369,7 @@ nix_err nix_init_bool(nix_c_context * context, Value * value, bool b) if (context) context->last_err_code = NIX_OK; try { - auto & v = check_value_not_null(value); + auto & v = check_value_out(value); v.mkBool(b); } NIXC_CATCH_ERRS @@ -354,7 +381,7 @@ nix_err nix_init_string(nix_c_context * context, Value * value, const char * str if (context) context->last_err_code = NIX_OK; try { - auto & v = check_value_not_null(value); + auto & v = check_value_out(value); v.mkString(std::string_view(str)); } NIXC_CATCH_ERRS @@ -365,7 +392,7 @@ nix_err nix_init_path_string(nix_c_context * context, EvalState * s, Value * val if (context) context->last_err_code = NIX_OK; try { - auto & v = check_value_not_null(value); + auto & v = check_value_out(value); v.mkPath(s->state.rootPath(nix::CanonPath(str))); } NIXC_CATCH_ERRS @@ -376,7 +403,7 @@ nix_err nix_init_float(nix_c_context * context, Value * value, double d) if (context) context->last_err_code = NIX_OK; try { - auto & v = check_value_not_null(value); + auto & v = check_value_out(value); v.mkFloat(d); } NIXC_CATCH_ERRS @@ -387,7 +414,7 @@ nix_err nix_init_int(nix_c_context * context, Value * value, int64_t i) if (context) context->last_err_code = NIX_OK; try { - auto & v = check_value_not_null(value); + auto & v = check_value_out(value); v.mkInt(i); } NIXC_CATCH_ERRS @@ -398,7 +425,7 @@ nix_err nix_init_null(nix_c_context * context, Value * value) if (context) context->last_err_code = NIX_OK; try { - auto & v = check_value_not_null(value); + auto & v = check_value_out(value); v.mkNull(); } NIXC_CATCH_ERRS @@ -422,7 +449,7 @@ nix_err nix_init_external(nix_c_context * context, Value * value, ExternalValue if (context) context->last_err_code = NIX_OK; try { - auto & v = check_value_not_null(value); + auto & v = check_value_out(value); auto r = (nix::ExternalValueBase *) val; v.mkExternal(r); } @@ -469,7 +496,7 @@ nix_err nix_make_list(nix_c_context * context, ListBuilder * list_builder, Value if (context) context->last_err_code = NIX_OK; try { - auto & v = check_value_not_null(value); + auto & v = check_value_out(value); v.mkList(list_builder->builder); } NIXC_CATCH_ERRS @@ -480,19 +507,19 @@ nix_err nix_init_primop(nix_c_context * context, Value * value, PrimOp * p) if (context) context->last_err_code = NIX_OK; try { - auto & v = check_value_not_null(value); + auto & v = check_value_out(value); v.mkPrimOp((nix::PrimOp *) p); } NIXC_CATCH_ERRS } -nix_err 
nix_copy_value(nix_c_context * context, Value * value, Value * source) +nix_err nix_copy_value(nix_c_context * context, Value * value, const Value * source) { if (context) context->last_err_code = NIX_OK; try { - auto & v = check_value_not_null(value); - auto & s = check_value_not_null(source); + auto & v = check_value_out(value); + auto & s = check_value_in(source); v = s; } NIXC_CATCH_ERRS @@ -503,7 +530,7 @@ nix_err nix_make_attrs(nix_c_context * context, Value * value, BindingsBuilder * if (context) context->last_err_code = NIX_OK; try { - auto & v = check_value_not_null(value); + auto & v = check_value_out(value); v.mkAttrs(b->builder); } NIXC_CATCH_ERRS @@ -550,7 +577,7 @@ nix_realised_string * nix_string_realise(nix_c_context * context, EvalState * st if (context) context->last_err_code = NIX_OK; try { - auto & v = check_value_not_null(value); + auto & v = check_value_in(value); nix::NixStringContext stringContext; auto rawStr = state->state.coerceToString(nix::noPos, v, stringContext, "while realising a string").toOwned(); nix::StorePathSet storePaths; diff --git a/src/libexpr-c/nix_api_value.h b/src/libexpr-c/nix_api_value.h index d8bd77c33c91..b2b3439ef3e3 100644 --- a/src/libexpr-c/nix_api_value.h +++ b/src/libexpr-c/nix_api_value.h @@ -422,7 +422,7 @@ nix_err nix_init_primop(nix_c_context * context, Value * value, PrimOp * op); * @param[in] source value to copy from * @return error code, NIX_OK on success. */ -nix_err nix_copy_value(nix_c_context * context, Value * value, Value * source); +nix_err nix_copy_value(nix_c_context * context, Value * value, const Value * source); /**@}*/ /** @brief Create a bindings builder diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index 72da1c465779..d7e3a2cdb0bd 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -15,9 +15,8 @@ #include "function-trace.hh" #include "profiles.hh" #include "print.hh" -#include "fs-input-accessor.hh" -#include "filtering-input-accessor.hh" -#include "memory-input-accessor.hh" +#include "filtering-source-accessor.hh" +#include "memory-source-accessor.hh" #include "signals.hh" #include "gc-small-vector.hh" #include "url.hh" @@ -50,6 +49,7 @@ #include #include +#include #include #include @@ -342,8 +342,10 @@ void initGC() gcInitialised = true; } +static constexpr size_t BASE_ENV_SIZE = 128; + EvalState::EvalState( - const SearchPath & _searchPath, + const LookupPath & _lookupPath, ref store, std::shared_ptr buildStore) : sWith(symbols.create("")) @@ -397,16 +399,16 @@ EvalState::EvalState( , emptyBindings(0) , rootFS( evalSettings.restrictEval || evalSettings.pureEval - ? ref(AllowListInputAccessor::create(makeFSInputAccessor(), {}, + ? ref(AllowListSourceAccessor::create(getFSSourceAccessor(), {}, [](const CanonPath & path) -> RestrictedPathError { auto modeInformation = evalSettings.pureEval ? 
"in pure evaluation mode (use '--impure' to override)" : "in restricted mode"; throw RestrictedPathError("access to absolute path '%1%' is forbidden %2%", path, modeInformation); })) - : makeFSInputAccessor()) - , corepkgsFS(makeMemoryInputAccessor()) - , internalFS(makeMemoryInputAccessor()) + : getFSSourceAccessor()) + , corepkgsFS(make_ref()) + , internalFS(make_ref()) , derivationInternal{corepkgsFS->addFile( CanonPath("derivation-internal.nix"), #include "primops/derivation.nix.gen.hh" @@ -424,8 +426,11 @@ EvalState::EvalState( #if HAVE_BOEHMGC , valueAllocCache(std::allocate_shared(traceable_allocator(), nullptr)) , env1AllocCache(std::allocate_shared(traceable_allocator(), nullptr)) + , baseEnvP(std::allocate_shared(traceable_allocator(), &allocEnv(BASE_ENV_SIZE))) + , baseEnv(**baseEnvP) +#else + , baseEnv(allocEnv(BASE_ENV_SIZE)) #endif - , baseEnv(allocEnv(128)) , staticBaseEnv{std::make_shared(nullptr, nullptr)} { corepkgsFS->setPathDisplay(""); @@ -448,16 +453,16 @@ EvalState::EvalState( /* Initialise the Nix expression search path. */ if (!evalSettings.pureEval) { - for (auto & i : _searchPath.elements) - searchPath.elements.emplace_back(SearchPath::Elem {i}); + for (auto & i : _lookupPath.elements) + lookupPath.elements.emplace_back(LookupPath::Elem {i}); for (auto & i : evalSettings.nixPath.get()) - searchPath.elements.emplace_back(SearchPath::Elem::parse(i)); + lookupPath.elements.emplace_back(LookupPath::Elem::parse(i)); } /* Allow access to all paths in the search path. */ - if (rootFS.dynamic_pointer_cast()) - for (auto & i : searchPath.elements) - resolveSearchPathPath(i.path, true); + if (rootFS.dynamic_pointer_cast()) + for (auto & i : lookupPath.elements) + resolveLookupPathPath(i.path, true); corepkgsFS->addFile( CanonPath("fetchurl.nix"), @@ -475,13 +480,13 @@ EvalState::~EvalState() void EvalState::allowPath(const Path & path) { - if (auto rootFS2 = rootFS.dynamic_pointer_cast()) + if (auto rootFS2 = rootFS.dynamic_pointer_cast()) rootFS2->allowPrefix(CanonPath(path)); } void EvalState::allowPath(const StorePath & storePath) { - if (auto rootFS2 = rootFS.dynamic_pointer_cast()) + if (auto rootFS2 = rootFS.dynamic_pointer_cast()) rootFS2->allowPrefix(CanonPath(store->toRealPath(storePath))); } @@ -535,13 +540,13 @@ void EvalState::checkURI(const std::string & uri) /* If the URI is a path, then check it against allowedPaths as well. */ if (hasPrefix(uri, "/")) { - if (auto rootFS2 = rootFS.dynamic_pointer_cast()) + if (auto rootFS2 = rootFS.dynamic_pointer_cast()) rootFS2->checkAccess(CanonPath(uri)); return; } if (hasPrefix(uri, "file://")) { - if (auto rootFS2 = rootFS.dynamic_pointer_cast()) + if (auto rootFS2 = rootFS.dynamic_pointer_cast()) rootFS2->checkAccess(CanonPath(uri.substr(7))); return; } @@ -2760,12 +2765,12 @@ SourcePath resolveExprPath(SourcePath path) if (++followCount >= maxFollow) throw Error("too many symbolic links encountered while traversing the path '%s'", path); auto p = path.parent().resolveSymlinks() / path.baseName(); - if (p.lstat().type != InputAccessor::tSymlink) break; + if (p.lstat().type != SourceAccessor::tSymlink) break; path = {path.accessor, CanonPath(p.readLink(), path.path.parent().value_or(CanonPath::root))}; } /* If `path' refers to a directory, append `/default.nix'. 
*/ - if (path.resolveSymlinks().lstat().type == InputAccessor::tDirectory) + if (path.resolveSymlinks().lstat().type == SourceAccessor::tDirectory) return path / "default.nix"; return path; @@ -2820,19 +2825,19 @@ Expr * EvalState::parseStdin() SourcePath EvalState::findFile(const std::string_view path) { - return findFile(searchPath, path); + return findFile(lookupPath, path); } -SourcePath EvalState::findFile(const SearchPath & searchPath, const std::string_view path, const PosIdx pos) +SourcePath EvalState::findFile(const LookupPath & lookupPath, const std::string_view path, const PosIdx pos) { - for (auto & i : searchPath.elements) { + for (auto & i : lookupPath.elements) { auto suffixOpt = i.prefix.suffixIfPotentialMatch(path); if (!suffixOpt) continue; auto suffix = *suffixOpt; - auto rOpt = resolveSearchPathPath(i.path); + auto rOpt = resolveLookupPathPath(i.path); if (!rOpt) continue; auto r = *rOpt; @@ -2852,11 +2857,11 @@ SourcePath EvalState::findFile(const SearchPath & searchPath, const std::string_ } -std::optional EvalState::resolveSearchPathPath(const SearchPath::Path & value0, bool initAccessControl) +std::optional EvalState::resolveLookupPathPath(const LookupPath::Path & value0, bool initAccessControl) { auto & value = value0.s; - auto i = searchPathResolved.find(value); - if (i != searchPathResolved.end()) return i->second; + auto i = lookupPathResolved.find(value); + if (i != lookupPathResolved.end()) return i->second; std::optional res; @@ -2912,7 +2917,7 @@ std::optional EvalState::resolveSearchPathPath(const SearchPath::Pa else debug("failed to resolve search path element '%s'", value); - searchPathResolved.emplace(value, res); + lookupPathResolved.emplace(value, res); return res; } diff --git a/src/libexpr/eval.hh b/src/libexpr/eval.hh index af65fdcbaa32..7ca2d6227b3d 100644 --- a/src/libexpr/eval.hh +++ b/src/libexpr/eval.hh @@ -9,7 +9,7 @@ #include "symbol-table.hh" #include "config.hh" #include "experimental-features.hh" -#include "input-accessor.hh" +#include "source-accessor.hh" #include "search-path.hh" #include "repl-exit-status.hh" @@ -33,7 +33,10 @@ class EvalState; class StorePath; struct SingleDerivedPath; enum RepairFlag : bool; -struct MemoryInputAccessor; +struct MemorySourceAccessor; +namespace eval_cache { + class EvalCache; +} /** @@ -161,9 +164,6 @@ struct DebugTrace { bool isError; }; -// Don't want Windows function -#undef SearchPath - class EvalState : public std::enable_shared_from_this { public: @@ -229,18 +229,18 @@ public: /** * The accessor for the root filesystem. */ - const ref rootFS; + const ref rootFS; /** * The in-memory filesystem for paths. */ - const ref corepkgsFS; + const ref corepkgsFS; /** * In-memory filesystem for internal, non-user-callable Nix * expressions like call-flake.nix. */ - const ref internalFS; + const ref internalFS; const SourcePath derivationInternal; @@ -285,6 +285,11 @@ public: return *new EvalErrorBuilder(*this, args...); } + /** + * A cache for evaluation caches, so as to reuse the same root value if possible + */ + std::map> evalCaches; + private: /* Cache for calls to addToStore(); maps source paths to the store @@ -311,9 +316,9 @@ private: #endif FileEvalCache fileEvalCache; - SearchPath searchPath; + LookupPath lookupPath; - std::map> searchPathResolved; + std::map> lookupPathResolved; /** * Cache used by prim_match(). 
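The hunks above finish moving `EvalState`'s filesystem handles from `InputAccessor` to `SourceAccessor` and rename the search-path members to `lookupPath`. As a rough sketch of what the accessor side means for callers that reach into `rootFS` directly (the helper below is invented for illustration and mirrors what `EvalState::allowPath` already does internally):

```cpp
#include "eval.hh"
#include "filtering-source-accessor.hh"
#include "canon-path.hh"

using namespace nix;

// Illustrative helper, not part of this patch: grant the evaluator access to
// one extra directory, the same way EvalState::allowPath does internally.
void allowExtraDir(EvalState & state, const std::string & dir)
{
    // In restricted or pure evaluation mode, rootFS is now an
    // AllowListSourceAccessor (formerly AllowListInputAccessor); outside
    // those modes the cast yields nothing and no allow-listing is needed.
    if (auto allowList = state.rootFS.dynamic_pointer_cast<AllowListSourceAccessor>())
        allowList->allowPrefix(CanonPath(dir));
}
```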
@@ -335,12 +340,12 @@ private: public: EvalState( - const SearchPath & _searchPath, + const LookupPath & _lookupPath, ref store, std::shared_ptr buildStore = nullptr); ~EvalState(); - SearchPath getSearchPath() { return searchPath; } + LookupPath getLookupPath() { return lookupPath; } /** * Return a `SourcePath` that refers to `path` in the root @@ -409,7 +414,7 @@ public: * Look up a file in the search path. */ SourcePath findFile(const std::string_view path); - SourcePath findFile(const SearchPath & searchPath, const std::string_view path, const PosIdx pos = noPos); + SourcePath findFile(const LookupPath & lookupPath, const std::string_view path, const PosIdx pos = noPos); /** * Try to resolve a search path value (not the optional key part). @@ -418,8 +423,8 @@ public: * * If it is not found, return `std::nullopt` */ - std::optional resolveSearchPathPath( - const SearchPath::Path & elem, + std::optional resolveLookupPathPath( + const LookupPath::Path & elem, bool initAccessControl = false); /** @@ -542,6 +547,11 @@ public: */ SingleDerivedPath coerceToSingleDerivedPath(const PosIdx pos, Value & v, std::string_view errorCtx); +#if HAVE_BOEHMGC + /** A GC root for the baseEnv reference. */ + std::shared_ptr baseEnvP; +#endif + public: /** diff --git a/src/libexpr/fetchurl.nix b/src/libexpr/fetchurl.nix index 9d1b61d7fef1..85a01d161797 100644 --- a/src/libexpr/fetchurl.nix +++ b/src/libexpr/fetchurl.nix @@ -28,11 +28,8 @@ derivation ({ # No need to double the amount of network traffic preferLocalBuild = true; + # This attribute does nothing; it's here to avoid changing evaluation results. impureEnvVars = [ - # We borrow these environment variables from the caller to allow - # easy proxy configuration. This is impure, but a fixed-output - # derivation like fetchurl is allowed to do so since its result is - # by definition pure. "http_proxy" "https_proxy" "ftp_proxy" "all_proxy" "no_proxy" ]; diff --git a/src/libexpr/flake/call-flake.nix b/src/libexpr/flake/call-flake.nix index d0ccb1e37b73..a411564df5bb 100644 --- a/src/libexpr/flake/call-flake.nix +++ b/src/libexpr/flake/call-flake.nix @@ -4,7 +4,7 @@ lockFileStr: # A mapping of lock file node IDs to { sourceInfo, subdir } attrsets, -# with sourceInfo.outPath providing an InputAccessor to a previously +# with sourceInfo.outPath providing an SourceAccessor to a previously # fetched tree. This is necessary for possibly unlocked inputs, in # particular the root input, but also --override-inputs pointing to # unlocked trees. 
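The lookup-path entry points declared above keep their old signatures apart from the `SearchPath` to `LookupPath` rename. A minimal usage sketch; the entry string and the looked-up name are made-up examples, not values taken from this patch:

```cpp
#include "eval.hh"
#include "search-path.hh"
#include "types.hh"

using namespace nix;

// Illustrative only: resolve "nixpkgs/lib" the way `<nixpkgs/lib>` would.
SourcePath findNixpkgsLib(EvalState & state)
{
    // Formerly SearchPath::parse and findFile(const SearchPath &, ...);
    // only the type names change, not the behaviour.
    LookupPath lookupPath = LookupPath::parse(Strings{"nixpkgs=/path/to/nixpkgs"});

    // The one-argument overload would instead consult the EvalState's own
    // lookup path, as returned by getLookupPath().
    return state.findFile(lookupPath, "nixpkgs/lib");
}
```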
diff --git a/src/libexpr/flake/config.cc b/src/libexpr/flake/config.cc index 3c7ed5d8a5b3..e0c5d45121d5 100644 --- a/src/libexpr/flake/config.cc +++ b/src/libexpr/flake/config.cc @@ -32,7 +32,7 @@ static void writeTrustedList(const TrustedList & trustedList) void ConfigFile::apply() { - std::set whitelist{"bash-prompt", "bash-prompt-prefix", "bash-prompt-suffix", "flake-registry", "commit-lockfile-summary"}; + std::set whitelist{"bash-prompt", "bash-prompt-prefix", "bash-prompt-suffix", "flake-registry", "commit-lock-file-summary", "commit-lockfile-summary"}; for (auto & [name, value] : settings) { diff --git a/src/libexpr/flake/flakeref.hh b/src/libexpr/flake/flakeref.hh index 5d78f49b6834..04c812ed099b 100644 --- a/src/libexpr/flake/flakeref.hh +++ b/src/libexpr/flake/flakeref.hh @@ -68,24 +68,39 @@ struct FlakeRef std::ostream & operator << (std::ostream & str, const FlakeRef & flakeRef); +/** + * @param baseDir Optional [base directory](https://nixos.org/manual/nix/unstable/glossary#gloss-base-directory) + */ FlakeRef parseFlakeRef( const std::string & url, const std::optional & baseDir = {}, bool allowMissing = false, bool isFlake = true); +/** + * @param baseDir Optional [base directory](https://nixos.org/manual/nix/unstable/glossary#gloss-base-directory) + */ std::optional maybeParseFlake( const std::string & url, const std::optional & baseDir = {}); +/** + * @param baseDir Optional [base directory](https://nixos.org/manual/nix/unstable/glossary#gloss-base-directory) + */ std::pair parseFlakeRefWithFragment( const std::string & url, const std::optional & baseDir = {}, bool allowMissing = false, bool isFlake = true); +/** + * @param baseDir Optional [base directory](https://nixos.org/manual/nix/unstable/glossary#gloss-base-directory) + */ std::optional> maybeParseFlakeRefWithFragment( const std::string & url, const std::optional & baseDir = {}); +/** + * @param baseDir Optional [base directory](https://nixos.org/manual/nix/unstable/glossary#gloss-base-directory) + */ std::tuple parseFlakeRefWithFragmentAndExtendedOutputsSpec( const std::string & url, const std::optional & baseDir = {}, diff --git a/src/libexpr/nixexpr.hh b/src/libexpr/nixexpr.hh index e3cae8385cf4..e37e3bdd1534 100644 --- a/src/libexpr/nixexpr.hh +++ b/src/libexpr/nixexpr.hh @@ -92,10 +92,10 @@ struct ExprString : Expr struct ExprPath : Expr { - ref accessor; + ref accessor; std::string s; Value v; - ExprPath(ref accessor, std::string s) : accessor(accessor), s(std::move(s)) + ExprPath(ref accessor, std::string s) : accessor(accessor), s(std::move(s)) { v.mkPath(&*accessor, this->s.c_str()); } diff --git a/src/libexpr/parser-state.hh b/src/libexpr/parser-state.hh index 024e79c432e6..5a928e9aadb3 100644 --- a/src/libexpr/parser-state.hh +++ b/src/libexpr/parser-state.hh @@ -44,7 +44,7 @@ struct ParserState Expr * result; SourcePath basePath; PosTable::Origin origin; - const ref rootFS; + const ref rootFS; const Expr::AstSymbols & s; void dupAttr(const AttrPath & attrPath, const PosIdx pos, const PosIdx prevPos); diff --git a/src/libexpr/parser.y b/src/libexpr/parser.y index bff0661703d1..00300449f6f9 100644 --- a/src/libexpr/parser.y +++ b/src/libexpr/parser.y @@ -41,7 +41,7 @@ Expr * parseExprFromBuf( const SourcePath & basePath, SymbolTable & symbols, PosTable & positions, - const ref rootFS, + const ref rootFS, const Expr::AstSymbols & astSymbols); } @@ -291,7 +291,7 @@ path_start /* add back in the trailing '/' to the first segment */ if ($1.p[$1.l-1] == '/' && $1.l > 1) path += "/"; - $$ = new 
ExprPath(ref(state->rootFS), std::move(path)); + $$ = new ExprPath(ref(state->rootFS), std::move(path)); } | HPATH { if (evalSettings.pureEval) { @@ -301,7 +301,7 @@ path_start ); } Path path(getHome() + std::string($1.p + 1, $1.l - 1)); - $$ = new ExprPath(ref(state->rootFS), std::move(path)); + $$ = new ExprPath(ref(state->rootFS), std::move(path)); } ; @@ -430,7 +430,7 @@ Expr * parseExprFromBuf( const SourcePath & basePath, SymbolTable & symbols, PosTable & positions, - const ref rootFS, + const ref rootFS, const Expr::AstSymbols & astSymbols) { yyscan_t scanner; diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc index f03acc2daabf..6b947b40d48c 100644 --- a/src/libexpr/primops.cc +++ b/src/libexpr/primops.cc @@ -15,7 +15,6 @@ #include "value-to-json.hh" #include "value-to-xml.hh" #include "primops.hh" -#include "fs-input-accessor.hh" #include "fetch-to-store.hh" #include @@ -1185,11 +1184,11 @@ static void derivationStrictInternal( .debugThrow(); /* !!! Check whether j is a valid attribute name. */ - /* Derivations cannot be named ‘drv’, because - then we'd have an attribute ‘drvPath’ in - the resulting set. */ - if (j == "drv") - state.error("invalid derivation output name 'drv'") + /* Derivations cannot be named ‘drvPath’, because + we already have an attribute ‘drvPath’ in + the resulting set (see state.sDrvPath). */ + if (j == "drvPath") + state.error("invalid derivation output name 'drvPath'") .atPos(v) .debugThrow(); outputs.insert(j); @@ -1716,7 +1715,7 @@ static void prim_findFile(EvalState & state, const PosIdx pos, Value * * args, V { state.forceList(*args[0], pos, "while evaluating the first argument passed to builtins.findFile"); - SearchPath searchPath; + LookupPath lookupPath; for (auto v2 : args[0]->listItems()) { state.forceAttrs(*v2, pos, "while evaluating an element of the list passed to builtins.findFile"); @@ -1744,15 +1743,15 @@ static void prim_findFile(EvalState & state, const PosIdx pos, Value * * args, V ).atPos(pos).debugThrow(); } - searchPath.elements.emplace_back(SearchPath::Elem { - .prefix = SearchPath::Prefix { .s = prefix }, - .path = SearchPath::Path { .s = path }, + lookupPath.elements.emplace_back(LookupPath::Elem { + .prefix = LookupPath::Prefix { .s = prefix }, + .path = LookupPath::Path { .s = path }, }); } auto path = state.forceStringNoCtx(*args[1], pos, "while evaluating the second argument passed to builtins.findFile"); - v.mkPath(state.findFile(searchPath, path, pos)); + v.mkPath(state.findFile(lookupPath, path, pos)); } static RegisterPrimOp primop_findFile(PrimOp { @@ -1828,12 +1827,12 @@ static RegisterPrimOp primop_hashFile({ .fun = prim_hashFile, }); -static Value * fileTypeToString(EvalState & state, InputAccessor::Type type) +static Value * fileTypeToString(EvalState & state, SourceAccessor::Type type) { return - type == InputAccessor::Type::tRegular ? &state.vStringRegular : - type == InputAccessor::Type::tDirectory ? &state.vStringDirectory : - type == InputAccessor::Type::tSymlink ? &state.vStringSymlink : + type == SourceAccessor::Type::tRegular ? &state.vStringRegular : + type == SourceAccessor::Type::tDirectory ? &state.vStringDirectory : + type == SourceAccessor::Type::tSymlink ? &state.vStringSymlink : &state.vStringUnknown; } @@ -4629,8 +4628,8 @@ void EvalState::createBaseEnv() }); /* Add a value containing the current Nix expression search path. 
*/ - auto list = buildList(searchPath.elements.size()); - for (const auto & [n, i] : enumerate(searchPath.elements)) { + auto list = buildList(lookupPath.elements.size()); + for (const auto & [n, i] : enumerate(lookupPath.elements)) { auto attrs = buildBindings(2); attrs.alloc("path").mkString(i.path.s); attrs.alloc("prefix").mkString(i.prefix.s); diff --git a/src/libexpr/primops/context.cc b/src/libexpr/primops/context.cc index 2d3013132f73..8c3f1b4e8b0f 100644 --- a/src/libexpr/primops/context.cc +++ b/src/libexpr/primops/context.cc @@ -14,8 +14,11 @@ static void prim_unsafeDiscardStringContext(EvalState & state, const PosIdx pos, static RegisterPrimOp primop_unsafeDiscardStringContext({ .name = "__unsafeDiscardStringContext", - .arity = 1, - .fun = prim_unsafeDiscardStringContext + .args = {"s"}, + .doc = R"( + Discard the [string context](@docroot@/language/string-context.md) from a value that can be coerced to a string. + )", + .fun = prim_unsafeDiscardStringContext, }); @@ -75,7 +78,11 @@ static RegisterPrimOp primop_unsafeDiscardOutputDependency({ .name = "__unsafeDiscardOutputDependency", .args = {"s"}, .doc = R"( - Create a copy of the given string where every "derivation deep" string context element is turned into a constant string context element. + Create a copy of the given string where every + [derivation deep](@docroot@/language/string-context.md#string-context-element-derivation-deep) + string context element is turned into a + [constant](@docroot@/language/string-context.md#string-context-element-constant) + string context element. This is the opposite of [`builtins.addDrvOutputDependencies`](#builtins-addDrvOutputDependencies). @@ -137,7 +144,11 @@ static RegisterPrimOp primop_addDrvOutputDependencies({ .name = "__addDrvOutputDependencies", .args = {"s"}, .doc = R"( - Create a copy of the given string where a single constant string context element is turned into a "derivation deep" string context element. + Create a copy of the given string where a single + [constant](@docroot@/language/string-context.md#string-context-element-constant) + string context element is turned into a + [derivation deep](@docroot@/language/string-context.md#string-context-element-derivation-deep) + string context element. The store path that is the constant string context element should point to a valid derivation, and end in `.drv`. diff --git a/src/libexpr/search-path.cc b/src/libexpr/search-path.cc index e2c3e050ae0d..657744e745c8 100644 --- a/src/libexpr/search-path.cc +++ b/src/libexpr/search-path.cc @@ -2,7 +2,7 @@ namespace nix { -std::optional SearchPath::Prefix::suffixIfPotentialMatch( +std::optional LookupPath::Prefix::suffixIfPotentialMatch( std::string_view path) const { auto n = s.size(); @@ -27,11 +27,11 @@ std::optional SearchPath::Prefix::suffixIfPotentialMatch( } -SearchPath::Elem SearchPath::Elem::parse(std::string_view rawElem) +LookupPath::Elem LookupPath::Elem::parse(std::string_view rawElem) { size_t pos = rawElem.find('='); - return SearchPath::Elem { + return LookupPath::Elem { .prefix = Prefix { .s = pos == std::string::npos ? 
std::string { "" } @@ -44,11 +44,11 @@ SearchPath::Elem SearchPath::Elem::parse(std::string_view rawElem) } -SearchPath SearchPath::parse(const Strings & rawElems) +LookupPath LookupPath::parse(const Strings & rawElems) { - SearchPath res; + LookupPath res; for (auto & rawElem : rawElems) - res.elements.emplace_back(SearchPath::Elem::parse(rawElem)); + res.elements.emplace_back(LookupPath::Elem::parse(rawElem)); return res; } diff --git a/src/libexpr/search-path.hh b/src/libexpr/search-path.hh index 231752ea66c5..acd843638536 100644 --- a/src/libexpr/search-path.hh +++ b/src/libexpr/search-path.hh @@ -8,17 +8,14 @@ namespace nix { -// Do not want the windows macro (alias to `SearchPathA`) -#undef SearchPath - /** * A "search path" is a list of ways look for something, used with * `builtins.findFile` and `< >` lookup expressions. */ -struct SearchPath +struct LookupPath { /** - * A single element of a `SearchPath`. + * A single element of a `LookupPath`. * * Each element is tried in succession when looking up a path. The first * element to completely match wins. @@ -26,16 +23,16 @@ struct SearchPath struct Elem; /** - * The first part of a `SearchPath::Elem` pair. + * The first part of a `LookupPath::Elem` pair. * * Called a "prefix" because it takes the form of a prefix of a file * path (first `n` path components). When looking up a path, to use - * a `SearchPath::Elem`, its `Prefix` must match the path. + * a `LookupPath::Elem`, its `Prefix` must match the path. */ struct Prefix; /** - * The second part of a `SearchPath::Elem` pair. + * The second part of a `LookupPath::Elem` pair. * * It is either a path or a URL (with certain restrictions / extra * structure). @@ -43,7 +40,7 @@ struct SearchPath * If the prefix of the path we are looking up matches, we then * check if the rest of the path points to something that exists * within the directory denoted by this. If so, the - * `SearchPath::Elem` as a whole matches, and that *something* being + * `LookupPath::Elem` as a whole matches, and that *something* being * pointed to by the rest of the path we are looking up is the * result. */ @@ -54,24 +51,24 @@ struct SearchPath * when looking up. (The actual lookup entry point is in `EvalState` * not in this class.) */ - std::list elements; + std::list elements; /** - * Parse a string into a `SearchPath` + * Parse a string into a `LookupPath` */ - static SearchPath parse(const Strings & rawElems); + static LookupPath parse(const Strings & rawElems); }; -struct SearchPath::Prefix +struct LookupPath::Prefix { /** * Underlying string * - * @todo Should we normalize this when constructing a `SearchPath::Prefix`? + * @todo Should we normalize this when constructing a `LookupPath::Prefix`? */ std::string s; - GENERATE_CMP(SearchPath::Prefix, me->s); + GENERATE_CMP(LookupPath::Prefix, me->s); /** * If the path possibly matches this search path element, return the @@ -82,7 +79,7 @@ struct SearchPath::Prefix std::optional suffixIfPotentialMatch(std::string_view path) const; }; -struct SearchPath::Path +struct LookupPath::Path { /** * The location of a search path item, as a path or URL. 
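For orientation, the renamed `LookupPath::Elem` still splits a raw `NIX_PATH`-style entry at the first `=` and matches lookups against the prefix half. The strings below are invented for the sketch, and the comments only describe the match/no-match outcome rather than the exact suffix value:

```cpp
#include "search-path.hh"

using namespace nix;

void prefixMatchSketch()
{
    // "nixpkgs=/path/to/nixpkgs" splits at the first '=': prefix "nixpkgs",
    // path "/path/to/nixpkgs". An entry without '=' gets an empty prefix.
    auto elem = LookupPath::Elem::parse("nixpkgs=/path/to/nixpkgs");

    // A lookup of "nixpkgs/lib" matches this element: the optional is set
    // and carries the remainder to be resolved under elem.path.
    auto hit = elem.prefix.suffixIfPotentialMatch("nixpkgs/lib");

    // A lookup under a different prefix does not match: std::nullopt.
    auto miss = elem.prefix.suffixIfPotentialMatch("nixos-hardware/raspberry-pi");

    (void) hit; (void) miss;
}
```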
@@ -91,21 +88,21 @@ struct SearchPath::Path */ std::string s; - GENERATE_CMP(SearchPath::Path, me->s); + GENERATE_CMP(LookupPath::Path, me->s); }; -struct SearchPath::Elem +struct LookupPath::Elem { Prefix prefix; Path path; - GENERATE_CMP(SearchPath::Elem, me->prefix, me->path); + GENERATE_CMP(LookupPath::Elem, me->prefix, me->path); /** - * Parse a string into a `SearchPath::Elem` + * Parse a string into a `LookupPath::Elem` */ - static SearchPath::Elem parse(std::string_view rawElem); + static LookupPath::Elem parse(std::string_view rawElem); }; } diff --git a/src/libexpr/value.hh b/src/libexpr/value.hh index 7ed3fa5a9ff1..61cf2d310641 100644 --- a/src/libexpr/value.hh +++ b/src/libexpr/value.hh @@ -7,7 +7,6 @@ #include "symbol-table.hh" #include "value/context.hh" -#include "input-accessor.hh" #include "source-path.hh" #include "print-options.hh" @@ -23,6 +22,7 @@ class BindingsBuilder; typedef enum { + tUninitialized = 0, tInt = 1, tBool, tString, @@ -166,7 +166,7 @@ public: struct Value { private: - InternalType internalType; + InternalType internalType = tUninitialized; friend std::string showType(const Value & v); @@ -216,7 +216,7 @@ public: }; struct Path { - InputAccessor * accessor; + SourceAccessor * accessor; const char * path; }; @@ -270,6 +270,7 @@ public: inline ValueType type(bool invalidIsThunk = false) const { switch (internalType) { + case tUninitialized: break; case tInt: return nInt; case tBool: return nBool; case tString: return nString; @@ -294,6 +295,16 @@ public: internalType = newType; } + /** + * A value becomes valid when it is initialized. We don't use this + * in the evaluator; only in the bindings, where the slight extra + * cost is warranted because of inexperienced callers. + */ + inline bool isValid() const + { + return internalType != tUninitialized; + } + inline void mkInt(NixInt n) { finishValue(tInt, { .integer = n }); @@ -323,7 +334,7 @@ public: void mkPath(const SourcePath & path); void mkPath(std::string_view path); - inline void mkPath(InputAccessor * accessor, const char * path) + inline void mkPath(SourceAccessor * accessor, const char * path) { finishValue(tPath, { .path = { .accessor = accessor, .path = path } }); } diff --git a/src/libfetchers/cache.cc b/src/libfetchers/cache.cc index e071b4717b48..7019b0325d7f 100644 --- a/src/libfetchers/cache.cc +++ b/src/libfetchers/cache.cc @@ -11,12 +11,11 @@ namespace nix::fetchers { static const char * schema = R"sql( create table if not exists Cache ( - input text not null, - info text not null, - path text not null, - immutable integer not null, + domain text not null, + key text not null, + value text not null, timestamp integer not null, - primary key (input) + primary key (domain, key) ); )sql"; @@ -28,7 +27,7 @@ struct CacheImpl : Cache struct State { SQLite db; - SQLiteStmt add, lookup; + SQLiteStmt upsert, lookup; }; Sync _state; @@ -37,136 +36,129 @@ struct CacheImpl : Cache { auto state(_state.lock()); - auto dbPath = getCacheDir() + "/nix/fetcher-cache-v1.sqlite"; + auto dbPath = getCacheDir() + "/nix/fetcher-cache-v2.sqlite"; createDirs(dirOf(dbPath)); state->db = SQLite(dbPath); state->db.isCache(); state->db.exec(schema); - state->add.create(state->db, - "insert or replace into Cache(input, info, path, immutable, timestamp) values (?, ?, ?, ?, ?)"); + state->upsert.create(state->db, + "insert or replace into Cache(domain, key, value, timestamp) values (?, ?, ?, ?)"); state->lookup.create(state->db, - "select info, path, immutable, timestamp from Cache where input = ?"); + "select value, 
timestamp from Cache where domain = ? and key = ?"); } void upsert( - const Attrs & inAttrs, - const Attrs & infoAttrs) override + const Key & key, + const Attrs & value) override { - _state.lock()->add.use() - (attrsToJSON(inAttrs).dump()) - (attrsToJSON(infoAttrs).dump()) - ("") // no path - (false) + _state.lock()->upsert.use() + (key.first) + (attrsToJSON(key.second).dump()) + (attrsToJSON(value).dump()) (time(0)).exec(); } - std::optional lookup(const Attrs & inAttrs) override + std::optional lookup( + const Key & key) override { - if (auto res = lookupExpired(inAttrs)) - return std::move(res->infoAttrs); + if (auto res = lookupExpired(key)) + return std::move(res->value); return {}; } - std::optional lookupWithTTL(const Attrs & inAttrs) override + std::optional lookupWithTTL( + const Key & key) override { - if (auto res = lookupExpired(inAttrs)) { + if (auto res = lookupExpired(key)) { if (!res->expired) - return std::move(res->infoAttrs); - debug("ignoring expired cache entry '%s'", - attrsToJSON(inAttrs).dump()); + return std::move(res->value); + debug("ignoring expired cache entry '%s:%s'", + key.first, attrsToJSON(key.second).dump()); } return {}; } - std::optional lookupExpired(const Attrs & inAttrs) override + std::optional lookupExpired( + const Key & key) override { auto state(_state.lock()); - auto inAttrsJSON = attrsToJSON(inAttrs).dump(); + auto keyJSON = attrsToJSON(key.second).dump(); - auto stmt(state->lookup.use()(inAttrsJSON)); + auto stmt(state->lookup.use()(key.first)(keyJSON)); if (!stmt.next()) { - debug("did not find cache entry for '%s'", inAttrsJSON); + debug("did not find cache entry for '%s:%s'", key.first, keyJSON); return {}; } - auto infoJSON = stmt.getStr(0); - auto locked = stmt.getInt(2) != 0; - auto timestamp = stmt.getInt(3); + auto valueJSON = stmt.getStr(0); + auto timestamp = stmt.getInt(1); - debug("using cache entry '%s' -> '%s'", inAttrsJSON, infoJSON); + debug("using cache entry '%s:%s' -> '%s'", key.first, keyJSON, valueJSON); - return Result2 { - .expired = !locked && (settings.tarballTtl.get() == 0 || timestamp + settings.tarballTtl < time(0)), - .infoAttrs = jsonToAttrs(nlohmann::json::parse(infoJSON)), + return Result { + .expired = settings.tarballTtl.get() == 0 || timestamp + settings.tarballTtl < time(0), + .value = jsonToAttrs(nlohmann::json::parse(valueJSON)), }; } - void add( + void upsert( + Key key, Store & store, - const Attrs & inAttrs, - const Attrs & infoAttrs, - const StorePath & storePath, - bool locked) override + Attrs value, + const StorePath & storePath) override { - _state.lock()->add.use() - (attrsToJSON(inAttrs).dump()) - (attrsToJSON(infoAttrs).dump()) - (store.printStorePath(storePath)) - (locked) - (time(0)).exec(); - } + /* Add the store prefix to the cache key to handle multiple + store prefixes. 
*/ + key.second.insert_or_assign("store", store.storeDir); - std::optional> lookup( - Store & store, - const Attrs & inAttrs) override - { - if (auto res = lookupExpired(store, inAttrs)) { - if (!res->expired) - return std::make_pair(std::move(res->infoAttrs), std::move(res->storePath)); - debug("ignoring expired cache entry '%s'", - attrsToJSON(inAttrs).dump()); - } - return {}; + value.insert_or_assign("storePath", (std::string) storePath.to_string()); + + upsert(key, value); } - std::optional lookupExpired( - Store & store, - const Attrs & inAttrs) override + std::optional lookupStorePath( + Key key, + Store & store) override { - auto state(_state.lock()); + key.second.insert_or_assign("store", store.storeDir); - auto inAttrsJSON = attrsToJSON(inAttrs).dump(); + auto res = lookupExpired(key); + if (!res) return std::nullopt; - auto stmt(state->lookup.use()(inAttrsJSON)); - if (!stmt.next()) { - debug("did not find cache entry for '%s'", inAttrsJSON); - return {}; - } + auto storePathS = getStrAttr(res->value, "storePath"); + res->value.erase("storePath"); - auto infoJSON = stmt.getStr(0); - auto storePath = store.parseStorePath(stmt.getStr(1)); - auto locked = stmt.getInt(2) != 0; - auto timestamp = stmt.getInt(3); + ResultWithStorePath res2(*res, StorePath(storePathS)); - store.addTempRoot(storePath); - if (!store.isValidPath(storePath)) { + store.addTempRoot(res2.storePath); + if (!store.isValidPath(res2.storePath)) { // FIXME: we could try to substitute 'storePath'. - debug("ignoring disappeared cache entry '%s'", inAttrsJSON); - return {}; + debug("ignoring disappeared cache entry '%s:%s' -> '%s'", + key.first, + attrsToJSON(key.second).dump(), + store.printStorePath(res2.storePath)); + return std::nullopt; } - debug("using cache entry '%s' -> '%s', '%s'", - inAttrsJSON, infoJSON, store.printStorePath(storePath)); + debug("using cache entry '%s:%s' -> '%s', '%s'", + key.first, + attrsToJSON(key.second).dump(), + attrsToJSON(res2.value).dump(), + store.printStorePath(res2.storePath)); - return Result { - .expired = !locked && (settings.tarballTtl.get() == 0 || timestamp + settings.tarballTtl < time(0)), - .infoAttrs = jsonToAttrs(nlohmann::json::parse(infoJSON)), - .storePath = std::move(storePath) - }; + return res2; + } + + std::optional lookupStorePathWithTTL( + Key key, + Store & store) override + { + auto res = lookupStorePath(std::move(key), store); + return res && !res->expired ? res : std::nullopt; } }; diff --git a/src/libfetchers/cache.hh b/src/libfetchers/cache.hh index 791d77025aaa..4d834fe0ca33 100644 --- a/src/libfetchers/cache.hh +++ b/src/libfetchers/cache.hh @@ -15,61 +15,80 @@ struct Cache virtual ~Cache() { } /** - * Add a value to the cache. The cache is an arbitrary mapping of - * Attrs to Attrs. + * A domain is a partition of the key/value cache for a particular + * purpose, e.g. git revision to revcount. + */ + using Domain = std::string_view; + + /** + * A cache key is a domain and an arbitrary set of attributes. + */ + using Key = std::pair; + + /** + * Add a key/value pair to the cache. */ virtual void upsert( - const Attrs & inAttrs, - const Attrs & infoAttrs) = 0; + const Key & key, + const Attrs & value) = 0; /** * Look up a key with infinite TTL. */ virtual std::optional lookup( - const Attrs & inAttrs) = 0; + const Key & key) = 0; /** * Look up a key. Return nothing if its TTL has exceeded * `settings.tarballTTL`. 
*/ virtual std::optional lookupWithTTL( - const Attrs & inAttrs) = 0; + const Key & key) = 0; - struct Result2 + struct Result { bool expired = false; - Attrs infoAttrs; + Attrs value; }; /** * Look up a key. Return a bool denoting whether its TTL has * exceeded `settings.tarballTTL`. */ - virtual std::optional lookupExpired( - const Attrs & inAttrs) = 0; - - /* Old cache for things that have a store path. */ - virtual void add( - Store & store, - const Attrs & inAttrs, - const Attrs & infoAttrs, - const StorePath & storePath, - bool locked) = 0; + virtual std::optional lookupExpired( + const Key & key) = 0; - virtual std::optional> lookup( + /** + * Insert a cache entry that has a store path associated with + * it. Such cache entries are always considered stale if the + * associated store path is invalid. + */ + virtual void upsert( + Key key, Store & store, - const Attrs & inAttrs) = 0; + Attrs value, + const StorePath & storePath) = 0; - struct Result + struct ResultWithStorePath : Result { - bool expired = false; - Attrs infoAttrs; StorePath storePath; }; - virtual std::optional lookupExpired( - Store & store, - const Attrs & inAttrs) = 0; + /** + * Look up a store path in the cache. The returned store path will + * be valid, but it may be expired. + */ + virtual std::optional lookupStorePath( + Key key, + Store & store) = 0; + + /** + * Look up a store path in the cache. Return nothing if its TTL + * has exceeded `settings.tarballTTL`. + */ + virtual std::optional lookupStorePathWithTTL( + Key key, + Store & store) = 0; }; ref getCache(); diff --git a/src/libfetchers/fetch-settings.hh b/src/libfetchers/fetch-settings.hh index d085f0d82775..50cd4d161be2 100644 --- a/src/libfetchers/fetch-settings.hh +++ b/src/libfetchers/fetch-settings.hh @@ -87,12 +87,12 @@ struct FetchSettings : public Config {}, true, Xp::Flakes}; Setting commitLockFileSummary{ - this, "", "commit-lockfile-summary", + this, "", "commit-lock-file-summary", R"( The commit summary to use when committing changed flake lock files. If empty, the summary is generated based on the action performed. )", - {}, true, Xp::Flakes}; + {"commit-lockfile-summary"}, true, Xp::Flakes}; Setting trustTarballsFromGitForges{ this, true, "trust-tarballs-from-git-forges", diff --git a/src/libfetchers/fetch-to-store.cc b/src/libfetchers/fetch-to-store.cc index 398286065e62..65aa72a6c36a 100644 --- a/src/libfetchers/fetch-to-store.cc +++ b/src/libfetchers/fetch-to-store.cc @@ -14,22 +14,20 @@ StorePath fetchToStore( RepairFlag repair) { // FIXME: add an optimisation for the case where the accessor is - // an FSInputAccessor pointing to a store path. + // a `PosixSourceAccessor` pointing to a store path. - std::optional cacheKey; + std::optional cacheKey; if (!filter && path.accessor->fingerprint) { - cacheKey = fetchers::Attrs{ - {"_what", "fetchToStore"}, - {"store", store.storeDir}, + cacheKey = fetchers::Cache::Key{"fetchToStore", { {"name", std::string{name}}, {"fingerprint", *path.accessor->fingerprint}, {"method", std::string{method.render()}}, {"path", path.path.abs()} - }; - if (auto res = fetchers::getCache()->lookup(store, *cacheKey)) { + }}; + if (auto res = fetchers::getCache()->lookupStorePath(*cacheKey, store)) { debug("store path cache hit for '%s'", path); - return res->second; + return res->storePath; } } else debug("source path '%s' is uncacheable", path); @@ -42,15 +40,14 @@ StorePath fetchToStore( auto storePath = mode == FetchMode::DryRun ? 
store.computeStorePath( - name, *path.accessor, path.path, method, HashAlgorithm::SHA256, {}, filter2).first + name, path, method, HashAlgorithm::SHA256, {}, filter2).first : store.addToStore( - name, *path.accessor, path.path, method, HashAlgorithm::SHA256, {}, filter2, repair); + name, path, method, HashAlgorithm::SHA256, {}, filter2, repair); if (cacheKey && mode == FetchMode::Copy) - fetchers::getCache()->add(store, *cacheKey, {}, storePath, true); + fetchers::getCache()->upsert(*cacheKey, store, {}, storePath); return storePath; } - } diff --git a/src/libfetchers/fetchers.cc b/src/libfetchers/fetchers.cc index a06d931db6ce..73923907c3a8 100644 --- a/src/libfetchers/fetchers.cc +++ b/src/libfetchers/fetchers.cc @@ -1,6 +1,5 @@ #include "fetchers.hh" #include "store-api.hh" -#include "input-accessor.hh" #include "source-path.hh" #include "fetch-to-store.hh" #include "json-utils.hh" @@ -238,7 +237,7 @@ void InputScheme::checkLocks(const Input & specified, const Input & final) const } } -std::pair, Input> Input::getAccessor(ref store) const +std::pair, Input> Input::getAccessor(ref store) const { try { auto [accessor, final] = getAccessorUnchecked(store); @@ -252,7 +251,7 @@ std::pair, Input> Input::getAccessor(ref store) const } } -std::pair, Input> Input::getAccessorUnchecked(ref store) const +std::pair, Input> Input::getAccessorUnchecked(ref store) const { // FIXME: cache the accessor @@ -419,9 +418,13 @@ namespace nlohmann { using namespace nix; fetchers::PublicKey adl_serializer::from_json(const json & json) { - auto type = optionalValueAt(json, "type").value_or("ssh-ed25519"); - auto key = valueAt(json, "key"); - return fetchers::PublicKey { getString(type), getString(key) }; + fetchers::PublicKey res = { }; + if (auto type = optionalValueAt(json, "type")) + res.type = getString(*type); + + res.key = getString(valueAt(json, "key")); + + return res; } void adl_serializer::to_json(json & json, fetchers::PublicKey p) { diff --git a/src/libfetchers/fetchers.hh b/src/libfetchers/fetchers.hh index bb21c68cc834..551be9a1f9aa 100644 --- a/src/libfetchers/fetchers.hh +++ b/src/libfetchers/fetchers.hh @@ -11,7 +11,7 @@ #include #include -namespace nix { class Store; class StorePath; struct InputAccessor; } +namespace nix { class Store; class StorePath; struct SourceAccessor; } namespace nix::fetchers { @@ -84,15 +84,15 @@ public: std::pair fetchToStore(ref store) const; /** - * Return an InputAccessor that allows access to files in the + * Return a `SourceAccessor` that allows access to files in the * input without copying it to the store. Also return a possibly * unlocked input. */ - std::pair, Input> getAccessor(ref store) const; + std::pair, Input> getAccessor(ref store) const; private: - std::pair, Input> getAccessorUnchecked(ref store) const; + std::pair, Input> getAccessorUnchecked(ref store) const; public: @@ -185,7 +185,7 @@ struct InputScheme std::string_view contents, std::optional commitMsg) const; - virtual std::pair, Input> getAccessor(ref store, const Input & input) const = 0; + virtual std::pair, Input> getAccessor(ref store, const Input & input) const = 0; /** * Is this `InputScheme` part of an experimental feature? 
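The reworked fetcher cache above replaces the old attribute-set-keyed table with a `(domain, attrs)` key plus explicit store-path variants, and `fetch-to-store.cc` shows the intended call pattern. A condensed sketch of the plain key/value side, with the domain name and attributes invented for illustration:

```cpp
#include "cache.hh"
#include "attrs.hh"

using namespace nix;
using namespace nix::fetchers;

// Illustrative only: the domain and attributes below are made up to show the
// shape of the new API, they are not cache entries Nix actually creates.
std::optional<Attrs> cachedRevCount(const std::string & rev)
{
    Cache::Key key{"exampleRevToRevCount", {{"rev", rev}}};

    // Hit: return the stored attribute set for this (domain, key) pair.
    if (auto value = getCache()->lookup(key))
        return value;

    // Miss: compute the value, store it, and return it.
    Attrs value{{"revCount", (uint64_t) 42}};
    getCache()->upsert(key, value);
    return value;
}
```

Entries with an associated store path go through the `upsert(Key, Store &, Attrs, StorePath)` and `lookupStorePath` pair instead, which also mixes the store prefix into the key and treats entries with invalid store paths as stale.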
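The `from_json` rewrite above now leaves `type` alone when the JSON omits it, deferring to the in-class default on `PublicKey` (see the `fetchers.hh` hunk that follows). A small sketch, with placeholder key material rather than a real key:

```cpp
#include <cassert>
#include <nlohmann/json.hpp>
#include "fetchers.hh"

// Illustrative only; the key string is a placeholder, not a real public key.
void publicKeyJsonSketch()
{
    // "type" is absent, so the member default ("ssh-ed25519") is kept
    // rather than being injected by the parser.
    auto pk = nlohmann::json::parse(R"({ "key": "AAAA...placeholder" })")
                  .get<nix::fetchers::PublicKey>();
    assert(pk.type == "ssh-ed25519");
    assert(pk.key == "AAAA...placeholder");
}
```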
@@ -230,6 +230,8 @@ struct PublicKey { std::string type = "ssh-ed25519"; std::string key; + + auto operator <=>(const PublicKey &) const = default; }; std::string publicKeys_to_string(const std::vector&); diff --git a/src/libfetchers/filtering-input-accessor.cc b/src/libfetchers/filtering-source-accessor.cc similarity index 58% rename from src/libfetchers/filtering-input-accessor.cc rename to src/libfetchers/filtering-source-accessor.cc index e0cbfd905c7f..dfd9e536d203 100644 --- a/src/libfetchers/filtering-input-accessor.cc +++ b/src/libfetchers/filtering-source-accessor.cc @@ -1,25 +1,25 @@ -#include "filtering-input-accessor.hh" +#include "filtering-source-accessor.hh" namespace nix { -std::string FilteringInputAccessor::readFile(const CanonPath & path) +std::string FilteringSourceAccessor::readFile(const CanonPath & path) { checkAccess(path); return next->readFile(prefix / path); } -bool FilteringInputAccessor::pathExists(const CanonPath & path) +bool FilteringSourceAccessor::pathExists(const CanonPath & path) { return isAllowed(path) && next->pathExists(prefix / path); } -std::optional FilteringInputAccessor::maybeLstat(const CanonPath & path) +std::optional FilteringSourceAccessor::maybeLstat(const CanonPath & path) { checkAccess(path); return next->maybeLstat(prefix / path); } -InputAccessor::DirEntries FilteringInputAccessor::readDirectory(const CanonPath & path) +SourceAccessor::DirEntries FilteringSourceAccessor::readDirectory(const CanonPath & path) { checkAccess(path); DirEntries entries; @@ -30,18 +30,18 @@ InputAccessor::DirEntries FilteringInputAccessor::readDirectory(const CanonPath return entries; } -std::string FilteringInputAccessor::readLink(const CanonPath & path) +std::string FilteringSourceAccessor::readLink(const CanonPath & path) { checkAccess(path); return next->readLink(prefix / path); } -std::string FilteringInputAccessor::showPath(const CanonPath & path) +std::string FilteringSourceAccessor::showPath(const CanonPath & path) { return displayPrefix + next->showPath(prefix / path) + displaySuffix; } -void FilteringInputAccessor::checkAccess(const CanonPath & path) +void FilteringSourceAccessor::checkAccess(const CanonPath & path) { if (!isAllowed(path)) throw makeNotAllowedError @@ -49,15 +49,15 @@ void FilteringInputAccessor::checkAccess(const CanonPath & path) : RestrictedPathError("access to path '%s' is forbidden", showPath(path)); } -struct AllowListInputAccessorImpl : AllowListInputAccessor +struct AllowListSourceAccessorImpl : AllowListSourceAccessor { std::set allowedPrefixes; - AllowListInputAccessorImpl( - ref next, + AllowListSourceAccessorImpl( + ref next, std::set && allowedPrefixes, MakeNotAllowedError && makeNotAllowedError) - : AllowListInputAccessor(SourcePath(next), std::move(makeNotAllowedError)) + : AllowListSourceAccessor(SourcePath(next), std::move(makeNotAllowedError)) , allowedPrefixes(std::move(allowedPrefixes)) { } @@ -72,15 +72,15 @@ struct AllowListInputAccessorImpl : AllowListInputAccessor } }; -ref AllowListInputAccessor::create( - ref next, +ref AllowListSourceAccessor::create( + ref next, std::set && allowedPrefixes, MakeNotAllowedError && makeNotAllowedError) { - return make_ref(next, std::move(allowedPrefixes), std::move(makeNotAllowedError)); + return make_ref(next, std::move(allowedPrefixes), std::move(makeNotAllowedError)); } -bool CachingFilteringInputAccessor::isAllowed(const CanonPath & path) +bool CachingFilteringSourceAccessor::isAllowed(const CanonPath & path) { auto i = cache.find(path); if (i != cache.end()) return 
i->second; diff --git a/src/libfetchers/filtering-input-accessor.hh b/src/libfetchers/filtering-source-accessor.hh similarity index 70% rename from src/libfetchers/filtering-input-accessor.hh rename to src/libfetchers/filtering-source-accessor.hh index 133a6cee3d09..9ec7bc21f0ee 100644 --- a/src/libfetchers/filtering-input-accessor.hh +++ b/src/libfetchers/filtering-source-accessor.hh @@ -1,6 +1,5 @@ #pragma once -#include "input-accessor.hh" #include "source-path.hh" namespace nix { @@ -13,17 +12,17 @@ namespace nix { typedef std::function MakeNotAllowedError; /** - * An abstract wrapping `InputAccessor` that performs access + * An abstract wrapping `SourceAccessor` that performs access * control. Subclasses should override `isAllowed()` to implement an * access control policy. The error message is customized at construction. */ -struct FilteringInputAccessor : InputAccessor +struct FilteringSourceAccessor : SourceAccessor { - ref next; + ref next; CanonPath prefix; MakeNotAllowedError makeNotAllowedError; - FilteringInputAccessor(const SourcePath & src, MakeNotAllowedError && makeNotAllowedError) + FilteringSourceAccessor(const SourcePath & src, MakeNotAllowedError && makeNotAllowedError) : next(src.accessor) , prefix(src.path) , makeNotAllowedError(std::move(makeNotAllowedError)) @@ -56,32 +55,32 @@ struct FilteringInputAccessor : InputAccessor }; /** - * A wrapping `InputAccessor` that checks paths against a set of + * A wrapping `SourceAccessor` that checks paths against a set of * allowed prefixes. */ -struct AllowListInputAccessor : public FilteringInputAccessor +struct AllowListSourceAccessor : public FilteringSourceAccessor { /** * Grant access to the specified prefix. */ virtual void allowPrefix(CanonPath prefix) = 0; - static ref create( - ref next, + static ref create( + ref next, std::set && allowedPrefixes, MakeNotAllowedError && makeNotAllowedError); - using FilteringInputAccessor::FilteringInputAccessor; + using FilteringSourceAccessor::FilteringSourceAccessor; }; /** - * A wrapping `InputAccessor` mix-in where `isAllowed()` caches the result of virtual `isAllowedUncached()`. + * A wrapping `SourceAccessor` mix-in where `isAllowed()` caches the result of virtual `isAllowedUncached()`. 
*/ -struct CachingFilteringInputAccessor : FilteringInputAccessor +struct CachingFilteringSourceAccessor : FilteringSourceAccessor { std::map cache; - using FilteringInputAccessor::FilteringInputAccessor; + using FilteringSourceAccessor::FilteringSourceAccessor; bool isAllowed(const CanonPath & path) override; diff --git a/src/libfetchers/fs-input-accessor.cc b/src/libfetchers/fs-input-accessor.cc deleted file mode 100644 index 2bbe53e11bdf..000000000000 --- a/src/libfetchers/fs-input-accessor.cc +++ /dev/null @@ -1,39 +0,0 @@ -#include "fs-input-accessor.hh" -#include "posix-source-accessor.hh" -#include "store-api.hh" - -namespace nix { - -struct FSInputAccessor : InputAccessor, PosixSourceAccessor -{ - using PosixSourceAccessor::PosixSourceAccessor; -}; - -ref makeFSInputAccessor() -{ - return make_ref(); -} - -ref makeFSInputAccessor(std::filesystem::path root) -{ - return make_ref(std::move(root)); -} - -ref makeStorePathAccessor( - ref store, - const StorePath & storePath) -{ - // FIXME: should use `store->getFSAccessor()` - auto root = std::filesystem::path { store->toRealPath(storePath) }; - auto accessor = makeFSInputAccessor(root); - accessor->setPathDisplay(root.string()); - return accessor; -} - -SourcePath getUnfilteredRootPath(CanonPath path) -{ - static auto rootFS = makeFSInputAccessor(); - return {rootFS, path}; -} - -} diff --git a/src/libfetchers/fs-input-accessor.hh b/src/libfetchers/fs-input-accessor.hh deleted file mode 100644 index e60906bd8273..000000000000 --- a/src/libfetchers/fs-input-accessor.hh +++ /dev/null @@ -1,21 +0,0 @@ -#pragma once - -#include "input-accessor.hh" -#include "source-path.hh" - -namespace nix { - -class StorePath; -class Store; - -ref makeFSInputAccessor(); - -ref makeFSInputAccessor(std::filesystem::path root); - -ref makeStorePathAccessor( - ref store, - const StorePath & storePath); - -SourcePath getUnfilteredRootPath(CanonPath path); - -} diff --git a/src/libfetchers/git-utils.cc b/src/libfetchers/git-utils.cc index a4a00374c199..2ea1e15ed8b8 100644 --- a/src/libfetchers/git-utils.cc +++ b/src/libfetchers/git-utils.cc @@ -1,8 +1,4 @@ #include "git-utils.hh" -#include "fs-input-accessor.hh" -#include "input-accessor.hh" -#include "filtering-input-accessor.hh" -#include "memory-input-accessor.hh" #include "cache.hh" #include "finally.hh" #include "processes.hh" @@ -57,7 +53,7 @@ bool operator == (const git_oid & oid1, const git_oid & oid2) namespace nix { -struct GitInputAccessor; +struct GitSourceAccessor; // Some wrapper types that ensure that the git_*_free functions get called. template @@ -334,13 +330,13 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this } /** - * A 'GitInputAccessor' with no regard for export-ignore or any other transformations. + * A 'GitSourceAccessor' with no regard for export-ignore or any other transformations. */ - ref getRawAccessor(const Hash & rev); + ref getRawAccessor(const Hash & rev); - ref getAccessor(const Hash & rev, bool exportIgnore) override; + ref getAccessor(const Hash & rev, bool exportIgnore) override; - ref getAccessor(const WorkdirInfo & wd, bool exportIgnore, MakeNotAllowedError e) override; + ref getAccessor(const WorkdirInfo & wd, bool exportIgnore, MakeNotAllowedError e) override; ref getFileSystemObjectSink() override; @@ -385,7 +381,7 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this runProgram(RunOptions { .program = "git", - .searchPath = true, + .lookupPath = true, // FIXME: git stderr messes up our progress indicator, so // we're using --quiet for now. 
Should process its stderr. .args = gitArgs, @@ -456,7 +452,7 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this { auto accessor = getAccessor(treeHash, false); - fetchers::Attrs cacheKey({{"_what", "treeHashToNarHash"}, {"treeHash", treeHash.gitRev()}}); + fetchers::Cache::Key cacheKey{"treeHashToNarHash", {{"treeHash", treeHash.gitRev()}}}; if (auto res = fetchers::getCache()->lookup(cacheKey)) return Hash::parseAny(fetchers::getStrAttr(*res, "narHash"), HashAlgorithm::SHA256); @@ -477,12 +473,12 @@ ref GitRepo::openRepo(const std::filesystem::path & path, bool create, /** * Raw git tree input accessor. */ -struct GitInputAccessor : InputAccessor +struct GitSourceAccessor : SourceAccessor { ref repo; Tree root; - GitInputAccessor(ref repo_, const Hash & rev) + GitSourceAccessor(ref repo_, const Hash & rev) : repo(repo_) , root(peelObject(*repo, lookupObject(*repo, hashToOID(rev)).get(), GIT_OBJECT_TREE)) { @@ -706,12 +702,12 @@ struct GitInputAccessor : InputAccessor } }; -struct GitExportIgnoreInputAccessor : CachingFilteringInputAccessor { +struct GitExportIgnoreSourceAccessor : CachingFilteringSourceAccessor { ref repo; std::optional rev; - GitExportIgnoreInputAccessor(ref repo, ref next, std::optional rev) - : CachingFilteringInputAccessor(next, [&](const CanonPath & path) { + GitExportIgnoreSourceAccessor(ref repo, ref next, std::optional rev) + : CachingFilteringSourceAccessor(next, [&](const CanonPath & path) { return RestrictedPathError(fmt("'%s' does not exist because it was fetched with exportIgnore enabled", path)); }) , repo(repo) @@ -922,40 +918,40 @@ struct GitFileSystemObjectSinkImpl : GitFileSystemObjectSink } }; -ref GitRepoImpl::getRawAccessor(const Hash & rev) +ref GitRepoImpl::getRawAccessor(const Hash & rev) { auto self = ref(shared_from_this()); - return make_ref(self, rev); + return make_ref(self, rev); } -ref GitRepoImpl::getAccessor(const Hash & rev, bool exportIgnore) +ref GitRepoImpl::getAccessor(const Hash & rev, bool exportIgnore) { auto self = ref(shared_from_this()); - ref rawGitAccessor = getRawAccessor(rev); + ref rawGitAccessor = getRawAccessor(rev); if (exportIgnore) { - return make_ref(self, rawGitAccessor, rev); + return make_ref(self, rawGitAccessor, rev); } else { return rawGitAccessor; } } -ref GitRepoImpl::getAccessor(const WorkdirInfo & wd, bool exportIgnore, MakeNotAllowedError makeNotAllowedError) +ref GitRepoImpl::getAccessor(const WorkdirInfo & wd, bool exportIgnore, MakeNotAllowedError makeNotAllowedError) { auto self = ref(shared_from_this()); /* In case of an empty workdir, return an empty in-memory tree. We - cannot use AllowListInputAccessor because it would return an + cannot use AllowListSourceAccessor because it would return an error for the root (and we can't add the root to the allow-list since that would allow access to all its children). */ - ref fileAccessor = + ref fileAccessor = wd.files.empty() - ? makeEmptyInputAccessor() - : AllowListInputAccessor::create( - makeFSInputAccessor(path), + ? 
makeEmptySourceAccessor() + : AllowListSourceAccessor::create( + makeFSSourceAccessor(path), std::set { wd.files }, - std::move(makeNotAllowedError)).cast(); + std::move(makeNotAllowedError)).cast(); if (exportIgnore) - return make_ref(self, fileAccessor, std::nullopt); + return make_ref(self, fileAccessor, std::nullopt); else return fileAccessor; } diff --git a/src/libfetchers/git-utils.hh b/src/libfetchers/git-utils.hh index 600a42da0642..29d799554805 100644 --- a/src/libfetchers/git-utils.hh +++ b/src/libfetchers/git-utils.hh @@ -1,7 +1,6 @@ #pragma once -#include "filtering-input-accessor.hh" -#include "input-accessor.hh" +#include "filtering-source-accessor.hh" #include "fs-sink.hh" namespace nix { @@ -75,9 +74,9 @@ struct GitRepo virtual bool hasObject(const Hash & oid) = 0; - virtual ref getAccessor(const Hash & rev, bool exportIgnore) = 0; + virtual ref getAccessor(const Hash & rev, bool exportIgnore) = 0; - virtual ref getAccessor(const WorkdirInfo & wd, bool exportIgnore, MakeNotAllowedError makeNotAllowedError) = 0; + virtual ref getAccessor(const WorkdirInfo & wd, bool exportIgnore, MakeNotAllowedError makeNotAllowedError) = 0; virtual ref getFileSystemObjectSink() = 0; diff --git a/src/libfetchers/github.cc b/src/libfetchers/github.cc index 985f2e47991c..d62a7482e3b1 100644 --- a/src/libfetchers/github.cc +++ b/src/libfetchers/github.cc @@ -225,8 +225,8 @@ struct GitArchiveInputScheme : InputScheme auto cache = getCache(); - Attrs treeHashKey{{"_what", "gitRevToTreeHash"}, {"rev", rev->gitRev()}}; - Attrs lastModifiedKey{{"_what", "gitRevToLastModified"}, {"rev", rev->gitRev()}}; + Cache::Key treeHashKey{"gitRevToTreeHash", {{"rev", rev->gitRev()}}}; + Cache::Key lastModifiedKey{"gitRevToLastModified", {{"rev", rev->gitRev()}}}; if (auto treeHashAttrs = cache->lookup(treeHashKey)) { if (auto lastModifiedAttrs = cache->lookup(lastModifiedKey)) { @@ -272,7 +272,7 @@ struct GitArchiveInputScheme : InputScheme return {std::move(input), tarballInfo}; } - std::pair, Input> getAccessor(ref store, const Input & _input) const override + std::pair, Input> getAccessor(ref store, const Input & _input) const override { auto [input, tarballInfo] = downloadArchive(store, _input); diff --git a/src/libfetchers/indirect.cc b/src/libfetchers/indirect.cc index 3f21445e101b..ba5078631389 100644 --- a/src/libfetchers/indirect.cc +++ b/src/libfetchers/indirect.cc @@ -97,7 +97,7 @@ struct IndirectInputScheme : InputScheme return input; } - std::pair, Input> getAccessor(ref store, const Input & input) const override + std::pair, Input> getAccessor(ref store, const Input & input) const override { throw Error("indirect input '%s' cannot be fetched directly", input.to_string()); } diff --git a/src/libfetchers/memory-input-accessor.cc b/src/libfetchers/memory-input-accessor.cc deleted file mode 100644 index 34a801f671ce..000000000000 --- a/src/libfetchers/memory-input-accessor.cc +++ /dev/null @@ -1,29 +0,0 @@ -#include "memory-input-accessor.hh" -#include "memory-source-accessor.hh" -#include "source-path.hh" - -namespace nix { - -struct MemoryInputAccessorImpl : MemoryInputAccessor, MemorySourceAccessor -{ - SourcePath addFile(CanonPath path, std::string && contents) override - { - return { - ref(shared_from_this()), - MemorySourceAccessor::addFile(path, std::move(contents)) - }; - } -}; - -ref makeMemoryInputAccessor() -{ - return make_ref(); -} - -ref makeEmptyInputAccessor() -{ - static auto empty = makeMemoryInputAccessor().cast(); - return empty; -} - -} diff --git 
a/src/libfetchers/memory-input-accessor.hh b/src/libfetchers/memory-input-accessor.hh deleted file mode 100644 index 63afadd2af53..000000000000 --- a/src/libfetchers/memory-input-accessor.hh +++ /dev/null @@ -1,18 +0,0 @@ -#include "input-accessor.hh" -#include "source-path.hh" - -namespace nix { - -/** - * An input accessor for an in-memory file system. - */ -struct MemoryInputAccessor : InputAccessor -{ - virtual SourcePath addFile(CanonPath path, std::string && contents) = 0; -}; - -ref makeMemoryInputAccessor(); - -ref makeEmptyInputAccessor(); - -} diff --git a/src/libfetchers/mounted-input-accessor.hh b/src/libfetchers/mounted-input-accessor.hh deleted file mode 100644 index b557c5dad7fb..000000000000 --- a/src/libfetchers/mounted-input-accessor.hh +++ /dev/null @@ -1,9 +0,0 @@ -#pragma once - -#include "input-accessor.hh" - -namespace nix { - -ref makeMountedInputAccessor(std::map> mounts); - -} diff --git a/src/libfetchers/mounted-input-accessor.cc b/src/libfetchers/mounted-source-accessor.cc similarity index 80% rename from src/libfetchers/mounted-input-accessor.cc rename to src/libfetchers/mounted-source-accessor.cc index b1eeaa97dbfb..68f3a546b57c 100644 --- a/src/libfetchers/mounted-input-accessor.cc +++ b/src/libfetchers/mounted-source-accessor.cc @@ -1,12 +1,12 @@ -#include "mounted-input-accessor.hh" +#include "mounted-source-accessor.hh" namespace nix { -struct MountedInputAccessor : InputAccessor +struct MountedSourceAccessor : SourceAccessor { - std::map> mounts; + std::map> mounts; - MountedInputAccessor(std::map> _mounts) + MountedSourceAccessor(std::map> _mounts) : mounts(std::move(_mounts)) { displayPrefix.clear(); @@ -53,7 +53,7 @@ struct MountedInputAccessor : InputAccessor return displayPrefix + accessor->showPath(subpath) + displaySuffix; } - std::pair, CanonPath> resolve(CanonPath path) + std::pair, CanonPath> resolve(CanonPath path) { // Find the nearest parent of `path` that is a mount point. 
std::vector subpath; @@ -71,9 +71,9 @@ struct MountedInputAccessor : InputAccessor } }; -ref makeMountedInputAccessor(std::map> mounts) +ref makeMountedSourceAccessor(std::map> mounts) { - return make_ref(std::move(mounts)); + return make_ref(std::move(mounts)); } } diff --git a/src/libfetchers/mounted-source-accessor.hh b/src/libfetchers/mounted-source-accessor.hh new file mode 100644 index 000000000000..45cbcb09a24f --- /dev/null +++ b/src/libfetchers/mounted-source-accessor.hh @@ -0,0 +1,9 @@ +#pragma once + +#include "source-accessor.hh" + +namespace nix { + +ref makeMountedSourceAccessor(std::map> mounts); + +} diff --git a/src/libfetchers/path.cc b/src/libfetchers/path.cc index 0af1bad7381c..68958d559719 100644 --- a/src/libfetchers/path.cc +++ b/src/libfetchers/path.cc @@ -1,8 +1,7 @@ #include "fetchers.hh" #include "store-api.hh" #include "archive.hh" -#include "fs-input-accessor.hh" -#include "posix-source-accessor.hh" +#include "store-path-accessor.hh" namespace nix::fetchers { @@ -54,6 +53,7 @@ struct PathInputScheme : InputScheme "narHash", }; } + std::optional inputFromAttrs(const Attrs & attrs) const override { getStrAttr(attrs, "path"); @@ -113,7 +113,7 @@ struct PathInputScheme : InputScheme throw Error("cannot fetch input '%s' because it uses a relative path", input.to_string()); } - std::pair, Input> getAccessor(ref store, const Input & _input) const override + std::pair, Input> getAccessor(ref store, const Input & _input) const override { Input input(_input); std::string absPath; diff --git a/src/libfetchers/store-path-accessor.cc b/src/libfetchers/store-path-accessor.cc new file mode 100644 index 000000000000..528bf2a4f517 --- /dev/null +++ b/src/libfetchers/store-path-accessor.cc @@ -0,0 +1,15 @@ +#include "store-path-accessor.hh" +#include "store-api.hh" + +namespace nix { + +ref makeStorePathAccessor(ref store, const StorePath & storePath) +{ + // FIXME: should use `store->getFSAccessor()` + auto root = std::filesystem::path{store->toRealPath(storePath)}; + auto accessor = makeFSSourceAccessor(root); + accessor->setPathDisplay(root.string()); + return accessor; +} + +} diff --git a/src/libfetchers/store-path-accessor.hh b/src/libfetchers/store-path-accessor.hh new file mode 100644 index 000000000000..989cf3fa29c1 --- /dev/null +++ b/src/libfetchers/store-path-accessor.hh @@ -0,0 +1,14 @@ +#pragma once + +#include "source-path.hh" + +namespace nix { + +class StorePath; +class Store; + +ref makeStorePathAccessor(ref store, const StorePath & storePath); + +SourcePath getUnfilteredRootPath(CanonPath path); + +} diff --git a/src/libfetchers/tarball.cc b/src/libfetchers/tarball.cc index a1f934c35df6..e19b1850560b 100644 --- a/src/libfetchers/tarball.cc +++ b/src/libfetchers/tarball.cc @@ -8,8 +8,7 @@ #include "tarfile.hh" #include "types.hh" #include "split.hh" -#include "posix-source-accessor.hh" -#include "fs-input-accessor.hh" +#include "store-path-accessor.hh" #include "store-api.hh" #include "git-utils.hh" @@ -23,21 +22,20 @@ DownloadFileResult downloadFile( { // FIXME: check store - Attrs inAttrs({ - {"type", "file"}, + Cache::Key key{"file", {{ {"url", url}, {"name", name}, - }); + }}}; - auto cached = getCache()->lookupExpired(*store, inAttrs); + auto cached = getCache()->lookupStorePath(key, *store); auto useCached = [&]() -> DownloadFileResult { return { .storePath = std::move(cached->storePath), - .etag = getStrAttr(cached->infoAttrs, "etag"), - .effectiveUrl = getStrAttr(cached->infoAttrs, "url"), - .immutableUrl = maybeGetStrAttr(cached->infoAttrs, 
"immutableUrl"), + .etag = getStrAttr(cached->value, "etag"), + .effectiveUrl = getStrAttr(cached->value, "url"), + .immutableUrl = maybeGetStrAttr(cached->value, "immutableUrl"), }; }; @@ -47,7 +45,7 @@ DownloadFileResult downloadFile( FileTransferRequest request(url); request.headers = headers; if (cached) - request.expectedETag = getStrAttr(cached->infoAttrs, "etag"); + request.expectedETag = getStrAttr(cached->value, "etag"); FileTransferResult res; try { res = getFileTransfer()->download(request); @@ -93,14 +91,9 @@ DownloadFileResult downloadFile( /* Cache metadata for all URLs in the redirect chain. */ for (auto & url : res.urls) { - inAttrs.insert_or_assign("url", url); + key.second.insert_or_assign("url", url); infoAttrs.insert_or_assign("url", *res.urls.rbegin()); - getCache()->add( - *store, - inAttrs, - infoAttrs, - *storePath, - false); + getCache()->upsert(key, *store, infoAttrs, *storePath); } return { @@ -115,12 +108,9 @@ DownloadTarballResult downloadTarball( const std::string & url, const Headers & headers) { - Attrs inAttrs({ - {"_what", "tarballCache"}, - {"url", url}, - }); + Cache::Key cacheKey{"tarball", {{"url", url}}}; - auto cached = getCache()->lookupExpired(inAttrs); + auto cached = getCache()->lookupExpired(cacheKey); auto attrsToResult = [&](const Attrs & infoAttrs) { @@ -133,19 +123,19 @@ DownloadTarballResult downloadTarball( }; }; - if (cached && !getTarballCache()->hasObject(getRevAttr(cached->infoAttrs, "treeHash"))) + if (cached && !getTarballCache()->hasObject(getRevAttr(cached->value, "treeHash"))) cached.reset(); if (cached && !cached->expired) /* We previously downloaded this tarball and it's younger than `tarballTtl`, so no need to check the server. */ - return attrsToResult(cached->infoAttrs); + return attrsToResult(cached->value); auto _res = std::make_shared>(); auto source = sinkToSource([&](Sink & sink) { FileTransferRequest req(url); - req.expectedETag = cached ? getStrAttr(cached->infoAttrs, "etag") : ""; + req.expectedETag = cached ? getStrAttr(cached->value, "etag") : ""; getFileTransfer()->download(std::move(req), sink, [_res](FileTransferResult r) { @@ -168,7 +158,7 @@ DownloadTarballResult downloadTarball( if (res->cached) { /* The server says that the previously downloaded version is still current. */ - infoAttrs = cached->infoAttrs; + infoAttrs = cached->value; } else { infoAttrs.insert_or_assign("etag", res->etag); infoAttrs.insert_or_assign("treeHash", parseSink->sync().gitRev()); @@ -179,8 +169,8 @@ DownloadTarballResult downloadTarball( /* Insert a cache entry for every URL in the redirect chain. */ for (auto & url : res->urls) { - inAttrs.insert_or_assign("url", url); - getCache()->upsert(inAttrs, infoAttrs); + cacheKey.second.insert_or_assign("url", url); + getCache()->upsert(cacheKey, infoAttrs); } // FIXME: add a cache entry for immutableUrl? 
That could allow @@ -297,7 +287,7 @@ struct FileInputScheme : CurlInputScheme : (!requireTree && !hasTarballExtension(url.path))); } - std::pair, Input> getAccessor(ref store, const Input & _input) const override + std::pair, Input> getAccessor(ref store, const Input & _input) const override { auto input(_input); @@ -332,7 +322,7 @@ struct TarballInputScheme : CurlInputScheme : (requireTree || hasTarballExtension(url.path))); } - std::pair, Input> getAccessor(ref store, const Input & _input) const override + std::pair, Input> getAccessor(ref store, const Input & _input) const override { auto input(_input); diff --git a/src/libfetchers/tarball.hh b/src/libfetchers/tarball.hh index bcb5dcc5ecf8..ba0dfd6230a4 100644 --- a/src/libfetchers/tarball.hh +++ b/src/libfetchers/tarball.hh @@ -8,7 +8,7 @@ namespace nix { class Store; -struct InputAccessor; +struct SourceAccessor; } namespace nix::fetchers { @@ -32,7 +32,7 @@ struct DownloadTarballResult Hash treeHash; time_t lastModified; std::optional immutableUrl; - ref accessor; + ref accessor; }; /** diff --git a/src/libfetchers/unix/git.cc b/src/libfetchers/unix/git.cc index 18915c0a7a07..fa7ef36211a0 100644 --- a/src/libfetchers/unix/git.cc +++ b/src/libfetchers/unix/git.cc @@ -9,8 +9,7 @@ #include "pathlocks.hh" #include "processes.hh" #include "git.hh" -#include "fs-input-accessor.hh" -#include "mounted-input-accessor.hh" +#include "mounted-source-accessor.hh" #include "git-utils.hh" #include "logging.hh" #include "finally.hh" @@ -343,7 +342,8 @@ struct GitInputScheme : InputScheme logger->pause(); Finally restoreLogger([]() { logger->resume(); }); runProgram("git", true, - { "-C", repoInfo.url, "--git-dir", repoInfo.gitDir, "commit", std::string(path.rel()), "-m", *commitMsg }); + { "-C", repoInfo.url, "--git-dir", repoInfo.gitDir, "commit", std::string(path.rel()), "-F", "-" }, + *commitMsg); } } } @@ -427,7 +427,7 @@ struct GitInputScheme : InputScheme uint64_t getLastModified(const RepoInfo & repoInfo, const std::string & repoDir, const Hash & rev) const { - Attrs key{{"_what", "gitLastModified"}, {"rev", rev.gitRev()}}; + Cache::Key key{"gitLastModified", {{"rev", rev.gitRev()}}}; auto cache = getCache(); @@ -436,14 +436,14 @@ struct GitInputScheme : InputScheme auto lastModified = GitRepo::openRepo(repoDir)->getLastModified(rev); - cache->upsert(key, Attrs{{"lastModified", lastModified}}); + cache->upsert(key, {{"lastModified", lastModified}}); return lastModified; } uint64_t getRevCount(const RepoInfo & repoInfo, const std::string & repoDir, const Hash & rev) const { - Attrs key{{"_what", "gitRevCount"}, {"rev", rev.gitRev()}}; + Cache::Key key{"gitRevCount", {{"rev", rev.gitRev()}}}; auto cache = getCache(); @@ -495,7 +495,7 @@ struct GitInputScheme : InputScheme } } - std::pair, Input> getAccessorFromCommit( + std::pair, Input> getAccessorFromCommit( ref store, RepoInfo & repoInfo, Input && input) const @@ -629,7 +629,7 @@ struct GitInputScheme : InputScheme input accessor consisting of the accessor for the top-level repo and the accessors for the submodules. 
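For illustration, the union accessor assembled in the hunk above looks roughly like this. The submodule path and the two accessor variables are placeholders; CanonPath, ref<SourceAccessor> and makeMountedSourceAccessor are the types and helper actually used in this patch.

    // Sketch: compose the top-level repo and one submodule into a single view.
    std::map<CanonPath, ref<SourceAccessor>> mounts;
    mounts.insert_or_assign(CanonPath("deps/libfoo"), submoduleAccessor); // per-submodule mount (path is illustrative)
    mounts.insert_or_assign(CanonPath::root, topLevelAccessor);           // the repository itself
    // Path lookups are delegated to the accessor of the nearest enclosing mount point.
    ref<SourceAccessor> accessor = makeMountedSourceAccessor(std::move(mounts));
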
*/ if (getSubmodulesAttr(input)) { - std::map> mounts; + std::map> mounts; for (auto & [submodule, submoduleRev] : repo->getSubmodules(rev, exportIgnore)) { auto resolved = repo->resolveSubmoduleUrl(submodule.url); @@ -642,6 +642,8 @@ struct GitInputScheme : InputScheme attrs.insert_or_assign("ref", submodule.branch); attrs.insert_or_assign("rev", submoduleRev.gitRev()); attrs.insert_or_assign("exportIgnore", Explicit{ exportIgnore }); + attrs.insert_or_assign("submodules", Explicit{ true }); + attrs.insert_or_assign("allRefs", Explicit{ true }); auto submoduleInput = fetchers::Input::fromAttrs(std::move(attrs)); auto [submoduleAccessor, submoduleInput2] = submoduleInput.getAccessor(store); @@ -651,7 +653,7 @@ struct GitInputScheme : InputScheme if (!mounts.empty()) { mounts.insert_or_assign(CanonPath::root, accessor); - accessor = makeMountedInputAccessor(std::move(mounts)); + accessor = makeMountedSourceAccessor(std::move(mounts)); } } @@ -663,7 +665,7 @@ struct GitInputScheme : InputScheme return {accessor, std::move(input)}; } - std::pair, Input> getAccessorFromWorkdir( + std::pair, Input> getAccessorFromWorkdir( ref store, RepoInfo & repoInfo, Input && input) const @@ -677,7 +679,7 @@ struct GitInputScheme : InputScheme auto exportIgnore = getExportIgnoreAttr(input); - ref accessor = + ref accessor = repo->getAccessor(repoInfo.workdirInfo, exportIgnore, makeNotAllowedError(repoInfo.url)); @@ -688,7 +690,7 @@ struct GitInputScheme : InputScheme consisting of the accessor for the top-level repo and the accessors for the submodule workdirs. */ if (getSubmodulesAttr(input) && !repoInfo.workdirInfo.submodules.empty()) { - std::map> mounts; + std::map> mounts; for (auto & submodule : repoInfo.workdirInfo.submodules) { auto submodulePath = CanonPath(repoInfo.url) / submodule.path; @@ -696,6 +698,9 @@ struct GitInputScheme : InputScheme attrs.insert_or_assign("type", "git"); attrs.insert_or_assign("url", submodulePath.abs()); attrs.insert_or_assign("exportIgnore", Explicit{ exportIgnore }); + attrs.insert_or_assign("submodules", Explicit{ true }); + // TODO: fall back to getAccessorFromCommit-like fetch when submodules aren't checked out + // attrs.insert_or_assign("allRefs", Explicit{ true }); auto submoduleInput = fetchers::Input::fromAttrs(std::move(attrs)); auto [submoduleAccessor, submoduleInput2] = @@ -711,7 +716,7 @@ struct GitInputScheme : InputScheme } mounts.insert_or_assign(CanonPath::root, accessor); - accessor = makeMountedInputAccessor(std::move(mounts)); + accessor = makeMountedSourceAccessor(std::move(mounts)); } if (!repoInfo.workdirInfo.isDirty) { @@ -750,7 +755,7 @@ struct GitInputScheme : InputScheme return {accessor, std::move(input)}; } - std::pair, Input> getAccessor(ref store, const Input & _input) const override + std::pair, Input> getAccessor(ref store, const Input & _input) const override { Input input(_input); diff --git a/src/libfetchers/unix/mercurial.cc b/src/libfetchers/unix/mercurial.cc index 4e0b26274feb..7bdf1e9375b5 100644 --- a/src/libfetchers/unix/mercurial.cc +++ b/src/libfetchers/unix/mercurial.cc @@ -6,8 +6,7 @@ #include "tarfile.hh" #include "store-api.hh" #include "url-parts.hh" -#include "fs-input-accessor.hh" -#include "posix-source-accessor.hh" +#include "store-path-accessor.hh" #include "fetch-settings.hh" #include @@ -24,7 +23,7 @@ static RunOptions hgOptions(const Strings & args) return { .program = "hg", - .searchPath = true, + .lookupPath = true, .args = args, .environment = env }; @@ -211,10 +210,9 @@ struct MercurialInputScheme : 
InputScheme return files.count(file); }; - PosixSourceAccessor accessor; auto storePath = store->addToStore( input.getName(), - accessor, CanonPath { actualPath }, + {getFSSourceAccessor(), CanonPath(actualPath)}, FileIngestionMethod::Recursive, HashAlgorithm::SHA256, {}, filter); @@ -224,22 +222,16 @@ struct MercurialInputScheme : InputScheme if (!input.getRef()) input.attrs.insert_or_assign("ref", "default"); - auto checkHashAlgorithm = [&](const std::optional & hash) + auto revInfoKey = [&](const Hash & rev) { - if (hash.has_value() && hash->algo != HashAlgorithm::SHA1) - throw Error("Hash '%s' is not supported by Mercurial. Only sha1 is supported.", hash->to_string(HashFormat::Base16, true)); - }; - - - auto getLockedAttrs = [&]() - { - checkHashAlgorithm(input.getRev()); + if (rev.algo != HashAlgorithm::SHA1) + throw Error("Hash '%s' is not supported by Mercurial. Only sha1 is supported.", rev.to_string(HashFormat::Base16, true)); - return Attrs({ - {"type", "hg"}, + return Cache::Key{"hgRev", { + {"store", store->storeDir}, {"name", name}, - {"rev", input.getRev()->gitRev()}, - }); + {"rev", input.getRev()->gitRev()} + }}; }; auto makeResult = [&](const Attrs & infoAttrs, const StorePath & storePath) -> StorePath @@ -250,26 +242,21 @@ struct MercurialInputScheme : InputScheme return storePath; }; - if (input.getRev()) { - if (auto res = getCache()->lookup(*store, getLockedAttrs())) - return makeResult(res->first, std::move(res->second)); - } - - auto revOrRef = input.getRev() ? input.getRev()->gitRev() : *input.getRef(); - - Attrs unlockedAttrs({ - {"type", "hg"}, - {"name", name}, + /* Check the cache for the most recent rev for this URL/ref. */ + Cache::Key refToRevKey{"hgRefToRev", { {"url", actualUrl}, - {"ref", *input.getRef()}, - }); + {"ref", *input.getRef()} + }}; - if (auto res = getCache()->lookup(*store, unlockedAttrs)) { - auto rev2 = Hash::parseAny(getStrAttr(res->first, "rev"), HashAlgorithm::SHA1); - if (!input.getRev() || input.getRev() == rev2) { - input.attrs.insert_or_assign("rev", rev2.gitRev()); - return makeResult(res->first, std::move(res->second)); - } + if (!input.getRev()) { + if (auto res = getCache()->lookupWithTTL(refToRevKey)) + input.attrs.insert_or_assign("rev", getRevAttr(*res, "rev").gitRev()); + } + + /* If we have a rev, check if we have a cached store path. */ + if (auto rev = input.getRev()) { + if (auto res = getCache()->lookupStorePath(revInfoKey(*rev), *store)) + return makeResult(res->value, res->storePath); } Path cacheDir = fmt("%s/nix/hg/%s", getCacheDir(), hashString(HashAlgorithm::SHA256, actualUrl).to_string(HashFormat::Nix32, false)); @@ -302,51 +289,47 @@ struct MercurialInputScheme : InputScheme } } + /* Fetch the remote rev or ref. */ auto tokens = tokenizeString>( - runHg({ "log", "-R", cacheDir, "-r", revOrRef, "--template", "{node} {rev} {branch}" })); + runHg({ + "log", "-R", cacheDir, + "-r", input.getRev() ? input.getRev()->gitRev() : *input.getRef(), + "--template", "{node} {rev} {branch}" + })); assert(tokens.size() == 3); - input.attrs.insert_or_assign("rev", Hash::parseAny(tokens[0], HashAlgorithm::SHA1).gitRev()); + auto rev = Hash::parseAny(tokens[0], HashAlgorithm::SHA1); + input.attrs.insert_or_assign("rev", rev.gitRev()); auto revCount = std::stoull(tokens[1]); input.attrs.insert_or_assign("ref", tokens[2]); - if (auto res = getCache()->lookup(*store, getLockedAttrs())) - return makeResult(res->first, std::move(res->second)); + /* Now that we have the rev, check the cache again for a + cached store path. 
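For reference, the two-level cache introduced here can be sketched as follows. All variables (url, ref, rev, name, store, infoAttrs, storePath) are placeholders assumed to be in scope inside namespace nix::fetchers; Cache::Key, lookupWithTTL, lookupStorePath and upsert are the interfaces used in the hunks above.

    // 1. Mutable, TTL-guarded mapping from (url, ref) to the most recent rev.
    Cache::Key refToRevKey{"hgRefToRev", {{"url", url}, {"ref", ref}}};
    if (!rev)
        if (auto res = getCache()->lookupWithTTL(refToRevKey))
            rev = getRevAttr(*res, "rev");

    // 2. Immutable mapping from the rev to a store path plus metadata.
    Cache::Key revKey{"hgRev", {{"store", store->storeDir}, {"name", name}, {"rev", rev->gitRev()}}};
    if (auto res = getCache()->lookupStorePath(revKey, *store))
        return res->storePath;

    // 3. After an actual fetch, record both levels for the next run.
    getCache()->upsert(refToRevKey, {{"rev", rev->gitRev()}});
    getCache()->upsert(revKey, *store, infoAttrs, storePath);
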
*/ + if (auto res = getCache()->lookupStorePath(revInfoKey(rev), *store)) + return makeResult(res->value, res->storePath); Path tmpDir = createTempDir(); AutoDelete delTmpDir(tmpDir, true); - runHg({ "archive", "-R", cacheDir, "-r", input.getRev()->gitRev(), tmpDir }); + runHg({ "archive", "-R", cacheDir, "-r", rev.gitRev(), tmpDir }); deletePath(tmpDir + "/.hg_archival.txt"); - PosixSourceAccessor accessor; - auto storePath = store->addToStore(name, accessor, CanonPath { tmpDir }); + auto storePath = store->addToStore(name, {getFSSourceAccessor(), CanonPath(tmpDir)}); Attrs infoAttrs({ - {"rev", input.getRev()->gitRev()}, {"revCount", (uint64_t) revCount}, }); if (!origRev) - getCache()->add( - *store, - unlockedAttrs, - infoAttrs, - storePath, - false); - - getCache()->add( - *store, - getLockedAttrs(), - infoAttrs, - storePath, - true); + getCache()->upsert(refToRevKey, {{"rev", rev.gitRev()}}); + + getCache()->upsert(revInfoKey(rev), *store, infoAttrs, storePath); return makeResult(infoAttrs, std::move(storePath)); } - std::pair, Input> getAccessor(ref store, const Input & _input) const override + std::pair, Input> getAccessor(ref store, const Input & _input) const override { Input input(_input); diff --git a/src/libmain/shared.cc b/src/libmain/shared.cc index a43a00f1647e..c1c9362489be 100644 --- a/src/libmain/shared.cc +++ b/src/libmain/shared.cc @@ -113,7 +113,7 @@ static void sigHandler(int signo) { } #endif -void initNix() +void initNix(bool loadConfig) { /* Turn on buffering for cerr. */ #if HAVE_PUBSETBUF @@ -121,7 +121,7 @@ void initNix() std::cerr.rdbuf()->pubsetbuf(buf, sizeof(buf)); #endif - initLibStore(); + initLibStore(loadConfig); #ifndef _WIN32 unix::startSignalHandlerThread(); @@ -173,12 +173,13 @@ void initNix() everybody. */ umask(0022); -#ifndef _WIN32 /* Initialise the PRNG. */ struct timeval tv; gettimeofday(&tv, 0); +#ifndef _WIN32 srandom(tv.tv_usec); #endif + srand(tv.tv_usec); } diff --git a/src/libmain/shared.hh b/src/libmain/shared.hh index 3c657d2b7b79..aa44e1321fa3 100644 --- a/src/libmain/shared.hh +++ b/src/libmain/shared.hh @@ -21,8 +21,9 @@ int handleExceptions(const std::string & programName, std::function fun) /** * Don't forget to call initPlugins() after settings are initialized! + * @param loadConfig Whether to load configuration from `nix.conf`, `NIX_CONFIG`, etc. May be disabled for unit tests. */ -void initNix(); +void initNix(bool loadConfig = true); void parseCmdLine(int argc, char * * argv, std::function parseArg); diff --git a/src/libstore-c/nix_api_store.cc b/src/libstore-c/nix_api_store.cc index 6ce4d01bbdf5..4fe25c7d4eed 100644 --- a/src/libstore-c/nix_api_store.cc +++ b/src/libstore-c/nix_api_store.cc @@ -19,6 +19,16 @@ nix_err nix_libstore_init(nix_c_context * context) NIXC_CATCH_ERRS } +nix_err nix_libstore_init_no_load_config(nix_c_context * context) +{ + if (context) + context->last_err_code = NIX_OK; + try { + nix::initLibStore(false); + } + NIXC_CATCH_ERRS +} + nix_err nix_init_plugins(nix_c_context * context) { if (context) diff --git a/src/libstore-c/nix_api_store.h b/src/libstore-c/nix_api_store.h index c83aca3f797f..209f91f0dc64 100644 --- a/src/libstore-c/nix_api_store.h +++ b/src/libstore-c/nix_api_store.h @@ -35,6 +35,13 @@ typedef struct StorePath StorePath; */ nix_err nix_libstore_init(nix_c_context * context); +/** + * @brief Like nix_libstore_init, but does not load the Nix configuration. + * + * This is useful when external configuration is not desired, such as when running unit tests. 
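A minimal usage sketch for the new entry point: only nix_libstore_init_no_load_config, nix_err and NIX_OK come from this patch; the context create/free helpers are assumed to be the usual ones from nix_api_util.h, and the test scaffolding is illustrative.

    #include <cstdlib>
    #include "nix_api_util.h"
    #include "nix_api_store.h"

    int main()
    {
        nix_c_context * ctx = nix_c_context_create();   // assumed from nix_api_util.h
        // Initialise libstore with built-in defaults only: nix.conf and
        // NIX_CONFIG are ignored, so the test run is hermetic.
        if (nix_libstore_init_no_load_config(ctx) != NIX_OK)
            return EXIT_FAILURE;
        // ... exercise the store API against ctx ...
        nix_c_context_free(ctx);
        return EXIT_SUCCESS;
    }
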
+ */ +nix_err nix_libstore_init_no_load_config(nix_c_context * context); + /** * @brief Loads the plugins specified in Nix's plugin-files setting. * diff --git a/src/libstore/binary-cache-store.cc b/src/libstore/binary-cache-store.cc index 97b6ec0529ae..5153ca64fb17 100644 --- a/src/libstore/binary-cache-store.cc +++ b/src/libstore/binary-cache-store.cc @@ -442,8 +442,7 @@ void BinaryCacheStore::queryPathInfoUncached(const StorePath & storePath, StorePath BinaryCacheStore::addToStore( std::string_view name, - SourceAccessor & accessor, - const CanonPath & path, + const SourcePath & path, ContentAddressMethod method, HashAlgorithm hashAlgo, const StorePathSet & references, @@ -454,10 +453,10 @@ StorePath BinaryCacheStore::addToStore( non-recursive+sha256 so we can just use the default implementation of this method in terms of addToStoreFromDump. */ - auto h = hashPath(accessor, path, method.getFileIngestionMethod(), hashAlgo, filter); + auto h = hashPath(path, method.getFileIngestionMethod(), hashAlgo, filter); auto source = sinkToSource([&](Sink & sink) { - accessor.dumpPath(path, sink, filter); + path.dumpPath(sink, filter); }); return addToStoreCommon(*source, repair, CheckSigs, [&](HashResult nar) { ValidPathInfo info { diff --git a/src/libstore/binary-cache-store.hh b/src/libstore/binary-cache-store.hh index 7c282830933f..695bc9252774 100644 --- a/src/libstore/binary-cache-store.hh +++ b/src/libstore/binary-cache-store.hh @@ -133,8 +133,7 @@ public: StorePath addToStore( std::string_view name, - SourceAccessor & accessor, - const CanonPath & srcPath, + const SourcePath & path, ContentAddressMethod method, HashAlgorithm hashAlgo, const StorePathSet & references, diff --git a/src/libstore/builtins/buildenv.cc b/src/libstore/builtins/buildenv.cc index e009f5b9dbfa..ab35c861d344 100644 --- a/src/libstore/builtins/buildenv.cc +++ b/src/libstore/builtins/buildenv.cc @@ -17,12 +17,12 @@ struct State /* For each activated package, create symlinks */ static void createLinks(State & state, const Path & srcDir, const Path & dstDir, int priority) { - DirEntries srcFiles; + std::filesystem::directory_iterator srcFiles; try { - srcFiles = readDirectory(srcDir); - } catch (SysError & e) { - if (e.errNo == ENOTDIR) { + srcFiles = std::filesystem::directory_iterator{srcDir}; + } catch (std::filesystem::filesystem_error & e) { + if (e.code() == std::errc::not_a_directory) { warn("not including '%s' in the user environment because it's not a directory", srcDir); return; } @@ -30,11 +30,12 @@ static void createLinks(State & state, const Path & srcDir, const Path & dstDir, } for (const auto & ent : srcFiles) { - if (ent.name[0] == '.') + auto name = ent.path().filename(); + if (name.string()[0] == '.') /* not matched by glob */ continue; - auto srcFile = srcDir + "/" + ent.name; - auto dstFile = dstDir + "/" + ent.name; + auto srcFile = (std::filesystem::path{srcDir} / name).string(); + auto dstFile = (std::filesystem::path{dstDir} / name).string(); struct stat srcSt; try { diff --git a/src/libstore/unix/ca-specific-schema.sql b/src/libstore/ca-specific-schema.sql similarity index 100% rename from src/libstore/unix/ca-specific-schema.sql rename to src/libstore/ca-specific-schema.sql diff --git a/src/libstore/unix/gc.cc b/src/libstore/gc.cc similarity index 89% rename from src/libstore/unix/gc.cc rename to src/libstore/gc.cc index 9b2e6d525cdb..8286dff271d0 100644 --- a/src/libstore/unix/gc.cc +++ b/src/libstore/gc.cc @@ -7,7 +7,7 @@ #if !defined(__linux__) // For shelling out to lsof -# include 
"processes.hh" +# include "processes.hh" #endif #include @@ -19,36 +19,24 @@ #include #include #include -#include -#include #include -#include +#if HAVE_STATVFS +# include +#endif +#ifndef _WIN32 +# include +# include +# include +#endif #include -#include #include namespace nix { -using namespace nix::unix; - static std::string gcSocketPath = "/gc-socket/socket"; static std::string gcRootsDir = "gcroots"; -static void makeSymlink(const Path & link, const Path & target) -{ - /* Create directories up to `gcRoot'. */ - createDirs(dirOf(link)); - - /* Create the new symlink. */ - Path tempLink = fmt("%1%.tmp-%2%-%3%", link, getpid(), random()); - createSymlink(target, tempLink); - - /* Atomically replace the old one. */ - renameFile(tempLink, link); -} - - void LocalStore::addIndirectRoot(const Path & path) { std::string hash = hashString(HashAlgorithm::SHA1, path).to_string(HashFormat::Nix32, false); @@ -57,32 +45,6 @@ void LocalStore::addIndirectRoot(const Path & path) } -Path IndirectRootStore::addPermRoot(const StorePath & storePath, const Path & _gcRoot) -{ - Path gcRoot(canonPath(_gcRoot)); - - if (isInStore(gcRoot)) - throw Error( - "creating a garbage collector root (%1%) in the Nix store is forbidden " - "(are you running nix-build inside the store?)", gcRoot); - - /* Register this root with the garbage collector, if it's - running. This should be superfluous since the caller should - have registered this root yet, but let's be on the safe - side. */ - addTempRoot(storePath); - - /* Don't clobber the link if it already exists and doesn't - point to the Nix store. */ - if (pathExists(gcRoot) && (!isLink(gcRoot) || !isInStore(readLink(gcRoot)))) - throw Error("cannot create symlink '%1%'; already exists", gcRoot); - makeSymlink(gcRoot, printStorePath(storePath)); - addIndirectRoot(gcRoot); - - return gcRoot; -} - - void LocalStore::createTempRootsFile() { auto fdTempRoots(_fdTempRoots.lock()); @@ -104,7 +66,7 @@ void LocalStore::createTempRootsFile() /* Check whether the garbage collector didn't get in our way. */ struct stat st; - if (fstat(fdTempRoots->get(), &st) == -1) + if (fstat(fromDescriptorReadOnly(fdTempRoots->get()), &st) == -1) throw SysError("statting '%1%'", fnTempRoots); if (st.st_size == 0) break; @@ -148,7 +110,7 @@ void LocalStore::addTempRoot(const StorePath & path) debug("connecting to '%s'", socketPath); *fdRootsSocket = createUnixDomainSocket(); try { - nix::connect(fdRootsSocket->get(), socketPath); + nix::connect(toSocket(fdRootsSocket->get()), socketPath); } catch (SysError & e) { /* The garbage collector may have exited or not created the socket yet, so we need to restart. */ @@ -199,18 +161,23 @@ void LocalStore::findTempRoots(Roots & tempRoots, bool censor) { /* Read the `temproots' directory for per-process temporary root files. */ - for (auto & i : readDirectory(tempRootsDir)) { - if (i.name[0] == '.') { + for (auto & i : std::filesystem::directory_iterator{tempRootsDir}) { + auto name = i.path().filename().string(); + if (name[0] == '.') { // Ignore hidden files. Some package managers (notably portage) create // those to keep the directory alive. 
continue; } - Path path = tempRootsDir + "/" + i.name; + Path path = i.path().string(); - pid_t pid = std::stoi(i.name); + pid_t pid = std::stoi(name); debug("reading temporary root file '%1%'", path); - AutoCloseFD fd(open(path.c_str(), O_CLOEXEC | O_RDWR, 0666)); + AutoCloseFD fd(toDescriptor(open(path.c_str(), +#ifndef _WIN32 + O_CLOEXEC | +#endif + O_RDWR, 0666))); if (!fd) { /* It's okay if the file has disappeared. */ if (errno == ENOENT) continue; @@ -243,7 +210,7 @@ void LocalStore::findTempRoots(Roots & tempRoots, bool censor) } -void LocalStore::findRoots(const Path & path, unsigned char type, Roots & roots) +void LocalStore::findRoots(const Path & path, std::filesystem::file_type type, Roots & roots) { auto foundRoot = [&](const Path & path, const Path & target) { try { @@ -257,15 +224,15 @@ void LocalStore::findRoots(const Path & path, unsigned char type, Roots & roots) try { - if (type == DT_UNKNOWN) - type = getFileType(path); + if (type == std::filesystem::file_type::unknown) + type = std::filesystem::symlink_status(path).type(); - if (type == DT_DIR) { - for (auto & i : readDirectory(path)) - findRoots(path + "/" + i.name, i.type, roots); + if (type == std::filesystem::file_type::directory) { + for (auto & i : std::filesystem::directory_iterator{path}) + findRoots(i.path().string(), i.symlink_status().type(), roots); } - else if (type == DT_LNK) { + else if (type == std::filesystem::file_type::symlink) { Path target = readLink(path); if (isInStore(target)) foundRoot(path, target); @@ -279,15 +246,14 @@ void LocalStore::findRoots(const Path & path, unsigned char type, Roots & roots) unlink(path.c_str()); } } else { - struct stat st2 = lstat(target); - if (!S_ISLNK(st2.st_mode)) return; + if (!std::filesystem::is_symlink(target)) return; Path target2 = readLink(target); if (isInStore(target2)) foundRoot(target, target2); } } } - else if (type == DT_REG) { + else if (type == std::filesystem::file_type::regular) { auto storePath = maybeParseStorePath(storeDir + "/" + std::string(baseNameOf(path))); if (storePath && isValidPath(*storePath)) roots[std::move(*storePath)].emplace(path); @@ -295,6 +261,14 @@ void LocalStore::findRoots(const Path & path, unsigned char type, Roots & roots) } + catch (std::filesystem::filesystem_error & e) { + /* We only ignore permanent failures. */ + if (e.code() == std::errc::permission_denied || e.code() == std::errc::no_such_file_or_directory || e.code() == std::errc::not_a_directory) + printInfo("cannot read potential root '%1%'", path); + else + throw; + } + catch (SysError & e) { /* We only ignore permanent failures. */ if (e.errNo == EACCES || e.errNo == ENOENT || e.errNo == ENOTDIR) @@ -308,8 +282,8 @@ void LocalStore::findRoots(const Path & path, unsigned char type, Roots & roots) void LocalStore::findRootsNoTemp(Roots & roots, bool censor) { /* Process direct roots in {gcroots,profiles}. */ - findRoots(stateDir + "/" + gcRootsDir, DT_UNKNOWN, roots); - findRoots(stateDir + "/profiles", DT_UNKNOWN, roots); + findRoots(stateDir + "/" + gcRootsDir, std::filesystem::file_type::unknown, roots); + findRoots(stateDir + "/profiles", std::filesystem::file_type::unknown, roots); /* Add additional roots returned by different platforms-specific heuristics. 
This is typically used to add running programs to @@ -328,24 +302,25 @@ Roots LocalStore::findRoots(bool censor) return roots; } -typedef std::unordered_map> UncheckedRoots; +/** + * Key is a mere string because cannot has path with macOS's libc++ + */ +typedef std::unordered_map> UncheckedRoots; -static void readProcLink(const std::string & file, UncheckedRoots & roots) +static void readProcLink(const std::filesystem::path & file, UncheckedRoots & roots) { - constexpr auto bufsiz = PATH_MAX; - char buf[bufsiz]; - auto res = readlink(file.c_str(), buf, bufsiz); - if (res == -1) { - if (errno == ENOENT || errno == EACCES || errno == ESRCH) + std::filesystem::path buf; + try { + buf = std::filesystem::read_symlink(file); + } catch (std::filesystem::filesystem_error & e) { + if (e.code() == std::errc::no_such_file_or_directory + || e.code() == std::errc::permission_denied + || e.code() == std::errc::no_such_process) return; - throw SysError("reading symlink"); - } - if (res == bufsiz) { - throw Error("overly long symlink starting with '%1%'", std::string_view(buf, bufsiz)); + throw; } - if (res > 0 && buf[0] == '/') - roots[std::string(static_cast(buf), res)] - .emplace(file); + if (buf.is_absolute()) + roots[buf.string()].emplace(file.string()); } static std::string quoteRegexChars(const std::string & raw) @@ -402,12 +377,12 @@ void LocalStore::findRuntimeRoots(Roots & roots, bool censor) } fdDir.reset(); - auto mapFile = fmt("/proc/%s/maps", ent->d_name); - auto mapLines = tokenizeString>(readFile(mapFile), "\n"); + std::filesystem::path mapFile = fmt("/proc/%s/maps", ent->d_name); + auto mapLines = tokenizeString>(readFile(mapFile.string()), "\n"); for (const auto & line : mapLines) { auto match = std::smatch{}; if (std::regex_match(line, match, mapRegex)) - unchecked[match[1]].emplace(mapFile); + unchecked[match[1]].emplace(mapFile.string()); } auto envFile = fmt("/proc/%s/environ", ent->d_name); @@ -438,7 +413,7 @@ void LocalStore::findRuntimeRoots(Roots & roots, bool censor) for (const auto & line : lsofLines) { std::smatch match; if (std::regex_match(line, match, lsofRegex)) - unchecked[match[1]].emplace("{lsof}"); + unchecked[match[1].str()].emplace("{lsof}"); } } catch (ExecError & e) { /* lsof not installed, lsof failed */ @@ -521,6 +496,10 @@ void LocalStore::collectGarbage(const GCOptions & options, GCResults & results) createDirs(dirOf(socketPath)); auto fdServer = createUnixDomainSocket(socketPath, 0666); + // TODO nonblocking socket on windows? +#ifdef _WIN32 + throw UnimplementedError("External GC client not implemented yet"); +#else if (fcntl(fdServer.get(), F_SETFL, fcntl(fdServer.get(), F_GETFL) | O_NONBLOCK) == -1) throw SysError("making socket '%1%' non-blocking", socketPath); @@ -621,6 +600,8 @@ void LocalStore::collectGarbage(const GCOptions & options, GCResults & results) if (serverThread.joinable()) serverThread.join(); }); +#endif + /* Find the roots. Since we've grabbed the GC lock, the set of permanent roots cannot increase now. */ printInfo("finding garbage collector roots..."); @@ -654,8 +635,8 @@ void LocalStore::collectGarbage(const GCOptions & options, GCResults & results) by another process. We need to be sure that we can acquire an exclusive lock before deleting them. 
*/ if (baseName.find("tmp-", 0) == 0) { - AutoCloseFD tmpDirFd = open(realPath.c_str(), O_RDONLY | O_DIRECTORY); - if (tmpDirFd.get() == -1 || !lockFile(tmpDirFd.get(), ltWrite, false)) { + AutoCloseFD tmpDirFd = openDirectory(realPath); + if (!tmpDirFd || !lockFile(tmpDirFd.get(), ltWrite, false)) { debug("skipping locked tempdir '%s'", realPath); return; } @@ -888,7 +869,13 @@ void LocalStore::collectGarbage(const GCOptions & options, GCResults & results) struct stat st; if (stat(linksDir.c_str(), &st) == -1) throw SysError("statting '%1%'", linksDir); - int64_t overhead = st.st_blocks * 512ULL; + int64_t overhead = +#ifdef _WIN32 + 0 +#else + st.st_blocks * 512ULL +#endif + ; printInfo("note: currently hard linking saves %.2f MiB", ((unsharedSize - actualSize - overhead) / (1024.0 * 1024.0))); @@ -901,6 +888,7 @@ void LocalStore::collectGarbage(const GCOptions & options, GCResults & results) void LocalStore::autoGC(bool sync) { +#ifdef HAVE_STATVFS static auto fakeFreeSpaceFile = getEnv("_NIX_TEST_FREE_SPACE_FILE"); auto getAvail = [this]() -> uint64_t { @@ -977,6 +965,7 @@ void LocalStore::autoGC(bool sync) sync: // Wait for the future outside of the state lock. if (sync) future.get(); +#endif } diff --git a/src/libstore/globals.cc b/src/libstore/globals.cc index 83e54e008f12..d9cab2fb8bfe 100644 --- a/src/libstore/globals.cc +++ b/src/libstore/globals.cc @@ -343,13 +343,13 @@ void initPlugins() { assert(!settings.pluginFiles.pluginsLoaded); for (const auto & pluginFile : settings.pluginFiles.get()) { - Paths pluginFiles; + std::vector pluginFiles; try { - auto ents = readDirectory(pluginFile); + auto ents = std::filesystem::directory_iterator{pluginFile}; for (const auto & ent : ents) - pluginFiles.emplace_back(pluginFile + "/" + ent.name); - } catch (SysError & e) { - if (e.errNo != ENOTDIR) + pluginFiles.emplace_back(ent.path()); + } catch (std::filesystem::filesystem_error & e) { + if (e.code() != std::errc::not_a_directory) throw; pluginFiles.emplace_back(pluginFile); } @@ -427,12 +427,13 @@ void assertLibStoreInitialized() { }; } -void initLibStore() { +void initLibStore(bool loadConfig) { if (initLibStoreDone) return; initLibUtil(); - loadConfFile(); + if (loadConfig) + loadConfFile(); preloadNSS(); diff --git a/src/libstore/globals.hh b/src/libstore/globals.hh index 852dba76413a..108933422190 100644 --- a/src/libstore/globals.hh +++ b/src/libstore/globals.hh @@ -424,8 +424,10 @@ public: Setting useSQLiteWAL{this, !isWSL1(), "use-sqlite-wal", "Whether SQLite should use WAL mode."}; +#ifndef _WIN32 Setting syncBeforeRegistering{this, false, "sync-before-registering", "Whether to call `sync()` before registering a path as valid."}; +#endif Setting useSubstitutes{ this, true, "substitute", @@ -782,6 +784,7 @@ public: - the store object has been signed using a key in the trusted keys list - the [`require-sigs`](#conf-require-sigs) option has been set to `false` + - the store URL is configured with `trusted=true` - the store object is [content-addressed](@docroot@/glossary.md#gloss-content-addressed-store-object) )", {"binary-cache-public-keys"}}; @@ -1279,9 +1282,10 @@ std::vector getUserConfigFiles(); extern const std::string nixVersion; /** - * NB: This is not sufficient. You need to call initNix() + * @param loadConfig Whether to load configuration from `nix.conf`, `NIX_CONFIG`, etc. May be disabled for unit tests. + * @note When using libexpr, and/or libmain, This is not sufficient. See initNix(). 
*/ -void initLibStore(); +void initLibStore(bool loadConfig = true); /** * It's important to initialize before doing _anything_, which is why we diff --git a/src/libstore/indirect-root-store.cc b/src/libstore/indirect-root-store.cc new file mode 100644 index 000000000000..844d0d6edad4 --- /dev/null +++ b/src/libstore/indirect-root-store.cc @@ -0,0 +1,45 @@ +#include "indirect-root-store.hh" + +namespace nix { + +void IndirectRootStore::makeSymlink(const Path & link, const Path & target) +{ + /* Create directories up to `gcRoot'. */ + createDirs(dirOf(link)); + + /* Create the new symlink. */ + Path tempLink = fmt("%1%.tmp-%2%-%3%", link, getpid(), rand()); + createSymlink(target, tempLink); + + /* Atomically replace the old one. */ + std::filesystem::rename(tempLink, link); +} + +Path IndirectRootStore::addPermRoot(const StorePath & storePath, const Path & _gcRoot) +{ + Path gcRoot(canonPath(_gcRoot)); + + if (isInStore(gcRoot)) + throw Error( + "creating a garbage collector root (%1%) in the Nix store is forbidden " + "(are you running nix-build inside the store?)", + gcRoot); + + /* Register this root with the garbage collector, if it's + running. This should be superfluous since the caller should + have registered this root yet, but let's be on the safe + side. */ + addTempRoot(storePath); + + /* Don't clobber the link if it already exists and doesn't + point to the Nix store. */ + if (pathExists(gcRoot) && (!std::filesystem::is_symlink(gcRoot) || !isInStore(readLink(gcRoot)))) + throw Error("cannot create symlink '%1%'; already exists", gcRoot); + + makeSymlink(gcRoot, printStorePath(storePath)); + addIndirectRoot(gcRoot); + + return gcRoot; +} + +} diff --git a/src/libstore/indirect-root-store.hh b/src/libstore/indirect-root-store.hh index c11679fe8b8e..b74ebc1eed47 100644 --- a/src/libstore/indirect-root-store.hh +++ b/src/libstore/indirect-root-store.hh @@ -67,6 +67,9 @@ struct IndirectRootStore : public virtual LocalFSStore * The form this weak-reference takes is implementation-specific. 
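As a usage sketch, this is roughly how a front end turns a build result into a permanent root. addPermRoot and addIndirectRoot are the functions shown above; the store handle, output path and link name are placeholders.

    // Sketch: register ./result as a permanent GC root for a freshly built path.
    auto store = openStore();                              // placeholder store handle
    if (auto rootStore = store.dynamic_pointer_cast<IndirectRootStore>())
        // Atomically creates the ./result symlink and registers it as an
        // indirect root, so the collector keeps outputPath alive.
        rootStore->addPermRoot(outputPath, absPath("result"));
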
*/ virtual void addIndirectRoot(const Path & path) = 0; + +protected: + void makeSymlink(const Path & link, const Path & target); }; } diff --git a/src/libstore/legacy-ssh-store.hh b/src/libstore/legacy-ssh-store.hh index ca2f115d25e4..343823693660 100644 --- a/src/libstore/legacy-ssh-store.hh +++ b/src/libstore/legacy-ssh-store.hh @@ -60,8 +60,7 @@ struct LegacySSHStore : public virtual LegacySSHStoreConfig, public virtual Stor StorePath addToStore( std::string_view name, - SourceAccessor & accessor, - const CanonPath & srcPath, + const SourcePath & path, ContentAddressMethod method, HashAlgorithm hashAlgo, const StorePathSet & references, diff --git a/src/libstore/local-binary-cache-store.cc b/src/libstore/local-binary-cache-store.cc index 5481dd762e27..87a6026f1afe 100644 --- a/src/libstore/local-binary-cache-store.cc +++ b/src/libstore/local-binary-cache-store.cc @@ -64,7 +64,7 @@ class LocalBinaryCacheStore : public virtual LocalBinaryCacheStoreConfig, public AutoDelete del(tmp, false); StreamToSourceAdapter source(istream); writeFile(tmp, source); - renameFile(tmp, path2); + std::filesystem::rename(tmp, path2); del.cancel(); } @@ -83,12 +83,13 @@ class LocalBinaryCacheStore : public virtual LocalBinaryCacheStoreConfig, public { StorePathSet paths; - for (auto & entry : readDirectory(binaryCacheDir)) { - if (entry.name.size() != 40 || - !hasSuffix(entry.name, ".narinfo")) + for (auto & entry : std::filesystem::directory_iterator{binaryCacheDir}) { + auto name = entry.path().filename().string(); + if (name.size() != 40 || + !hasSuffix(name, ".narinfo")) continue; paths.insert(parseStorePath( - storeDir + "/" + entry.name.substr(0, entry.name.size() - 8) + storeDir + "/" + name.substr(0, name.size() - 8) + "-" + MissingName)); } diff --git a/src/libstore/unix/local-store.cc b/src/libstore/local-store.cc similarity index 96% rename from src/libstore/unix/local-store.cc rename to src/libstore/local-store.cc index 1593affd608a..dd06e5b6579d 100644 --- a/src/libstore/unix/local-store.cc +++ b/src/libstore/local-store.cc @@ -26,7 +26,6 @@ #include #include #include -#include #include #include #include @@ -34,17 +33,20 @@ #include #include #include -#include + +#ifndef _WIN32 +# include +#endif #if __linux__ -#include -#include -#include -#include +# include +# include +# include +# include #endif #ifdef __CYGWIN__ -#include +# include #endif #include @@ -52,8 +54,6 @@ namespace nix { -using namespace nix::unix; - std::string LocalStoreConfig::doc() { return @@ -224,6 +224,7 @@ LocalStore::LocalStore(const Params & params) } } +#ifndef _WIN32 /* Optionally, create directories and set permissions for a multi-user install. */ if (isRootUser() && settings.buildUsersGroup != "") { @@ -245,6 +246,7 @@ LocalStore::LocalStore(const Params & params) } } } +#endif /* Ensure that the store and its parents are not symlinks. 
*/ if (!settings.allowSymlinkedStore) { @@ -270,14 +272,25 @@ LocalStore::LocalStore(const Params & params) if (stat(reservedPath.c_str(), &st) == -1 || st.st_size != settings.reservedSize) { - AutoCloseFD fd = open(reservedPath.c_str(), O_WRONLY | O_CREAT | O_CLOEXEC, 0600); + AutoCloseFD fd = toDescriptor(open(reservedPath.c_str(), O_WRONLY | O_CREAT +#ifndef _WIN32 + | O_CLOEXEC +#endif + , 0600)); int res = -1; #if HAVE_POSIX_FALLOCATE res = posix_fallocate(fd.get(), 0, settings.reservedSize); #endif if (res == -1) { writeFull(fd.get(), std::string(settings.reservedSize, 'X')); - [[gnu::unused]] auto res2 = ftruncate(fd.get(), settings.reservedSize); + [[gnu::unused]] auto res2 = + +#ifdef _WIN32 + SetEndOfFile(fd.get()) +#else + ftruncate(fd.get(), settings.reservedSize) +#endif + ; } } } catch (SystemError & e) { /* don't care about errors */ @@ -460,10 +473,14 @@ LocalStore::LocalStore(std::string scheme, std::string path, const Params & para AutoCloseFD LocalStore::openGCLock() { Path fnGCLock = stateDir + "/gc.lock"; - auto fdGCLock = open(fnGCLock.c_str(), O_RDWR | O_CREAT | O_CLOEXEC, 0600); + auto fdGCLock = open(fnGCLock.c_str(), O_RDWR | O_CREAT +#ifndef _WIN32 + | O_CLOEXEC +#endif + , 0600); if (!fdGCLock) throw SysError("opening global GC lock '%1%'", fnGCLock); - return fdGCLock; + return toDescriptor(fdGCLock); } @@ -491,7 +508,7 @@ LocalStore::~LocalStore() try { auto fdTempRoots(_fdTempRoots.lock()); if (*fdTempRoots) { - *fdTempRoots = -1; + fdTempRoots->close(); unlink(fnTempRoots.c_str()); } } catch (...) { @@ -969,11 +986,13 @@ void LocalStore::registerValidPath(const ValidPathInfo & info) void LocalStore::registerValidPaths(const ValidPathInfos & infos) { +#ifndef _WIN32 /* SQLite will fsync by default, but the new valid paths may not be fsync-ed. So some may want to fsync them before registering the validity, at the expense of some speed of the path registering operation. 
*/ if (settings.syncBeforeRegistering) sync(); +#endif return retrySQLite([&]() { auto state(_state.lock()); @@ -1132,12 +1151,12 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source, specified.hash.algo, std::string { info.path.hashPart() }, }; - dumpPath(*accessor, path, caSink, (FileSerialisationMethod) fim); + dumpPath({accessor, path}, caSink, (FileSerialisationMethod) fim); h = caSink.finish().first; break; } case FileIngestionMethod::Git: - h = git::dumpHash(specified.hash.algo, *accessor, path).hash; + h = git::dumpHash(specified.hash.algo, {accessor, path}).hash; break; } ContentAddress { @@ -1155,7 +1174,7 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source, autoGC(); - canonicalisePathMetaData(realPath, {}); + canonicalisePathMetaData(realPath); optimisePath(realPath, repair); // FIXME: combine with hashPath() @@ -1220,8 +1239,8 @@ StorePath LocalStore::addToStoreFromDump( } std::unique_ptr delTempDir; - Path tempPath; - Path tempDir; + std::filesystem::path tempPath; + std::filesystem::path tempDir; AutoCloseFD tempDirFd; bool methodsMatch = ContentAddressMethod(FileIngestionMethod(dumpMethod)) == hashMethod; @@ -1237,9 +1256,9 @@ StorePath LocalStore::addToStoreFromDump( std::tie(tempDir, tempDirFd) = createTempDirInStore(); delTempDir = std::make_unique(tempDir); - tempPath = tempDir + "/x"; + tempPath = tempDir / "x"; - restorePath(tempPath, bothSource, dumpMethod); + restorePath(tempPath.string(), bothSource, dumpMethod); dumpBuffer.reset(); dump = {}; @@ -1247,14 +1266,12 @@ StorePath LocalStore::addToStoreFromDump( auto [dumpHash, size] = hashSink->finish(); - PosixSourceAccessor accessor; - auto desc = ContentAddressWithReferences::fromParts( hashMethod, methodsMatch ? dumpHash : hashPath( - accessor, CanonPath { tempPath }, + PosixSourceAccessor::createAtRoot(tempPath), hashMethod.getFileIngestionMethod(), hashAlgo), { .others = references, @@ -1297,7 +1314,7 @@ StorePath LocalStore::addToStoreFromDump( } } else { /* Move the temporary path we restored above. */ - moveFile(tempPath, realPath); + moveFile(tempPath.string(), realPath); } /* For computing the nar hash. In recursive SHA-256 mode, this @@ -1309,7 +1326,7 @@ StorePath LocalStore::addToStoreFromDump( narHash = narSink.finish(); } - canonicalisePathMetaData(realPath, {}); // FIXME: merge into restorePath + canonicalisePathMetaData(realPath); // FIXME: merge into restorePath optimisePath(realPath, repair); @@ -1332,9 +1349,9 @@ StorePath LocalStore::addToStoreFromDump( /* Create a temporary directory in the store that won't be garbage-collected until the returned FD is closed. */ -std::pair LocalStore::createTempDirInStore() +std::pair LocalStore::createTempDirInStore() { - Path tmpDirFn; + std::filesystem::path tmpDirFn; AutoCloseFD tmpDirFd; bool lockedByUs = false; do { @@ -1342,12 +1359,12 @@ std::pair LocalStore::createTempDirInStore() the GC between createTempDir() and when we acquire a lock on it. We'll repeat until 'tmpDir' exists and we've locked it. 
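The addToStoreFromDump hunk above shows the recurring accessor change in this file: instead of constructing a PosixSourceAccessor and passing accessor and CanonPath separately, callers pass a single SourcePath. A sketch with a made-up path; hashPath, PosixSourceAccessor::createAtRoot, FileIngestionMethod and HashAlgorithm are the names used above.

    // Sketch: NAR-hash a filesystem path via a SourcePath instead of a
    // separate accessor + CanonPath pair.
    std::filesystem::path p = "/tmp/some-source";          // placeholder
    auto h = hashPath(
        PosixSourceAccessor::createAtRoot(p),              // SourcePath rooted at p
        FileIngestionMethod::Recursive,
        HashAlgorithm::SHA256);
    printInfo("hash is %s", h.to_string(HashFormat::Nix32, false));
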
*/ tmpDirFn = createTempDir(realStoreDir, "tmp"); - tmpDirFd = open(tmpDirFn.c_str(), O_RDONLY | O_DIRECTORY); - if (tmpDirFd.get() < 0) { + tmpDirFd = openDirectory(tmpDirFn); + if (!tmpDirFd) { continue; } lockedByUs = lockFile(tmpDirFd.get(), ltWrite, true); - } while (!pathExists(tmpDirFn) || !lockedByUs); + } while (!pathExists(tmpDirFn.string()) || !lockedByUs); return {tmpDirFn, std::move(tmpDirFd)}; } @@ -1389,21 +1406,19 @@ bool LocalStore::verifyStore(bool checkContents, RepairFlag repair) printInfo("checking link hashes..."); - for (auto & link : readDirectory(linksDir)) { - printMsg(lvlTalkative, "checking contents of '%s'", link.name); - Path linkPath = linksDir + "/" + link.name; + for (auto & link : std::filesystem::directory_iterator{linksDir}) { + auto name = link.path().filename(); + printMsg(lvlTalkative, "checking contents of '%s'", name); PosixSourceAccessor accessor; std::string hash = hashPath( - accessor, CanonPath { linkPath }, + PosixSourceAccessor::createAtRoot(link.path()), FileIngestionMethod::Recursive, HashAlgorithm::SHA256).to_string(HashFormat::Nix32, false); - if (hash != link.name) { + if (hash != name.string()) { printError("link '%s' was modified! expected hash '%s', got '%s'", - linkPath, link.name, hash); + link.path(), name, hash); if (repair) { - if (unlink(linkPath.c_str()) == 0) - printInfo("removed link '%s'", linkPath); - else - throw SysError("removing corrupt link '%s'", linkPath); + std::filesystem::remove(link.path()); + printInfo("removed link '%s'", link.path()); } else { errors = true; } @@ -1483,9 +1498,9 @@ LocalStore::VerificationResult LocalStore::verifyAllValidPaths(RepairFlag repair database and the filesystem) in the loop below, in order to catch invalid states. */ - for (auto & i : readDirectory(realStoreDir)) { + for (auto & i : std::filesystem::directory_iterator{realStoreDir.to_string()}) { try { - storePathsInStoreDir.insert({i.name}); + storePathsInStoreDir.insert({i.path().filename().string()}); } catch (BadStorePath &) { } } @@ -1584,8 +1599,12 @@ static void makeMutable(const Path & path) /* The O_NOFOLLOW is important to prevent us from changing the mutable bit on the target of a symlink (which would be a security hole). 
*/ - AutoCloseFD fd = open(path.c_str(), O_RDONLY | O_NOFOLLOW | O_CLOEXEC); - if (fd == -1) { + AutoCloseFD fd = open(path.c_str(), O_RDONLY | O_NOFOLLOW +#ifndef _WIN32 + | O_CLOEXEC +#endif + ); + if (fd == INVALID_DESCRIPTOR) { if (errno == ELOOP) return; // it's a symlink throw SysError("opening file '%1%'", path); } @@ -1760,7 +1779,7 @@ void LocalStore::addBuildLog(const StorePath & drvPath, std::string_view log) writeFile(tmpFile, compress("bzip2", log)); - renameFile(tmpFile, logPath); + std::filesystem::rename(tmpFile, logPath); } std::optional LocalStore::getVersion() diff --git a/src/libstore/unix/local-store.hh b/src/libstore/local-store.hh similarity index 98% rename from src/libstore/unix/local-store.hh rename to src/libstore/local-store.hh index 47d3c04bc787..b3d7bd6d0c31 100644 --- a/src/libstore/unix/local-store.hh +++ b/src/libstore/local-store.hh @@ -32,7 +32,6 @@ struct OptimiseStats { unsigned long filesLinked = 0; uint64_t bytesFreed = 0; - uint64_t blocksFreed = 0; }; struct LocalStoreConfig : virtual LocalFSStoreConfig @@ -371,13 +370,13 @@ private: PathSet queryValidPathsOld(); ValidPathInfo queryPathInfoOld(const Path & path); - void findRoots(const Path & path, unsigned char type, Roots & roots); + void findRoots(const Path & path, std::filesystem::file_type type, Roots & roots); void findRootsNoTemp(Roots & roots, bool censor); void findRuntimeRoots(Roots & roots, bool censor); - std::pair createTempDirInStore(); + std::pair createTempDirInStore(); typedef std::unordered_set InodeHash; diff --git a/src/libstore/unix/local-store.md b/src/libstore/local-store.md similarity index 100% rename from src/libstore/unix/local-store.md rename to src/libstore/local-store.md diff --git a/src/libstore/local.mk b/src/libstore/local.mk index 2e118f6cb2c3..cc67da786591 100644 --- a/src/libstore/local.mk +++ b/src/libstore/local.mk @@ -21,6 +21,9 @@ libstore_LDFLAGS += $(SQLITE3_LIBS) $(LIBCURL_LIBS) $(THREAD_LDFLAGS) ifdef HOST_LINUX libstore_LDFLAGS += -ldl endif +ifdef HOST_WINDOWS + libstore_LDFLAGS += -lws2_32 +endif $(foreach file,$(libstore_FILES),$(eval $(call install-data-in,$(d)/$(file),$(datadir)/nix/sandbox))) @@ -86,11 +89,11 @@ else endif endif -$(d)/unix/local-store.cc: $(d)/unix/schema.sql.gen.hh $(d)/unix/ca-specific-schema.sql.gen.hh +$(d)/local-store.cc: $(d)/schema.sql.gen.hh $(d)/ca-specific-schema.sql.gen.hh $(d)/unix/build.cc: -clean-files += $(d)/unix/schema.sql.gen.hh $(d)/unix/ca-specific-schema.sql.gen.hh +clean-files += $(d)/schema.sql.gen.hh $(d)/ca-specific-schema.sql.gen.hh $(eval $(call install-file-in, $(buildprefix)$(d)/nix-store.pc, $(libdir)/pkgconfig, 0644)) diff --git a/src/libstore/unix/optimise-store.cc b/src/libstore/optimise-store.cc similarity index 80% rename from src/libstore/unix/optimise-store.cc rename to src/libstore/optimise-store.cc index daaaaf0733e2..2477cf0c0206 100644 --- a/src/libstore/unix/optimise-store.cc +++ b/src/libstore/optimise-store.cc @@ -148,64 +148,60 @@ void LocalStore::optimisePath_(Activity * act, OptimiseStats & stats, contents of the symlink (i.e. the result of readlink()), not the contents of the target (which may not even exist). */ Hash hash = ({ - PosixSourceAccessor accessor; hashPath( - accessor, CanonPath { path }, + {make_ref(), CanonPath(path)}, FileSerialisationMethod::Recursive, HashAlgorithm::SHA256).first; }); debug("'%1%' has hash '%2%'", path, hash.to_string(HashFormat::Nix32, true)); /* Check if this is a known hash. 
*/ - Path linkPath = linksDir + "/" + hash.to_string(HashFormat::Nix32, false); + std::filesystem::path linkPath = std::filesystem::path{linksDir} / hash.to_string(HashFormat::Nix32, false); /* Maybe delete the link, if it has been corrupted. */ - if (pathExists(linkPath)) { - auto stLink = lstat(linkPath); + if (std::filesystem::exists(std::filesystem::symlink_status(linkPath))) { + auto stLink = lstat(linkPath.string()); if (st.st_size != stLink.st_size || (repair && hash != ({ - PosixSourceAccessor accessor; hashPath( - accessor, CanonPath { linkPath }, + PosixSourceAccessor::createAtRoot(linkPath), FileSerialisationMethod::Recursive, HashAlgorithm::SHA256).first; }))) { // XXX: Consider overwriting linkPath with our valid version. - warn("removing corrupted link '%s'", linkPath); + warn("removing corrupted link %s", linkPath); warn("There may be more corrupted paths." "\nYou should run `nix-store --verify --check-contents --repair` to fix them all"); - unlink(linkPath.c_str()); + std::filesystem::remove(linkPath); } } - if (!pathExists(linkPath)) { + if (!std::filesystem::exists(std::filesystem::symlink_status(linkPath))) { /* Nope, create a hard link in the links directory. */ - if (link(path.c_str(), linkPath.c_str()) == 0) { + try { + std::filesystem::create_hard_link(path, linkPath); inodeHash.insert(st.st_ino); - return; - } - - switch (errno) { - case EEXIST: - /* Fall through if another process created ‘linkPath’ before - we did. */ - break; - - case ENOSPC: - /* On ext4, that probably means the directory index is - full. When that happens, it's fine to ignore it: we - just effectively disable deduplication of this - file. */ - printInfo("cannot link '%s' to '%s': %s", linkPath, path, strerror(errno)); - return; - - default: - throw SysError("cannot link '%1%' to '%2%'", linkPath, path); + } catch (std::filesystem::filesystem_error & e) { + if (e.code() == std::errc::file_exists) { + /* Fall through if another process created ‘linkPath’ before + we did. */ + } + + else if (e.code() == std::errc::no_space_on_device) { + /* On ext4, that probably means the directory index is + full. When that happens, it's fine to ignore it: we + just effectively disable deduplication of this + file. */ + printInfo("cannot link '%s' to '%s': %s", linkPath, path, strerror(errno)); + return; + } + + else throw; } } /* Yes! We've seen a file with the same contents. Replace the current file with a hard link to that file. */ - auto stLink = lstat(linkPath); + auto stLink = lstat(linkPath.string()); if (st.st_ino == stLink.st_ino) { debug("'%1%' is already linked to '%2%'", path, linkPath); @@ -225,10 +221,13 @@ void LocalStore::optimisePath_(Activity * act, OptimiseStats & stats, its timestamp back to 0. */ MakeReadOnly makeReadOnly(mustToggle ? dirOfPath : ""); - Path tempLink = fmt("%1%/.tmp-link-%2%-%3%", realStoreDir, getpid(), random()); + std::filesystem::path tempLink = fmt("%1%/.tmp-link-%2%-%3%", realStoreDir, getpid(), rand()); - if (link(linkPath.c_str(), tempLink.c_str()) == -1) { - if (errno == EMLINK) { + try { + std::filesystem::create_hard_link(linkPath, tempLink); + inodeHash.insert(st.st_ino); + } catch (std::filesystem::filesystem_error & e) { + if (e.code() == std::errc::too_many_links) { /* Too many links to the same file (>= 32000 on most file systems). This is likely to happen with empty files. Just shrug and ignore. 
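The portability pattern used throughout this patch: raw errno checks around link() and friends become std::filesystem calls whose filesystem_error is compared against std::errc values. A condensed, self-contained sketch (function name and paths are placeholders):

    #include <filesystem>
    #include <system_error>

    // Sketch: create a hard link, tolerating the two errors the optimiser
    // treats as non-fatal (pre-existing link, hard-link limit on e.g. ext4).
    void tryLink(const std::filesystem::path & target, const std::filesystem::path & link)
    {
        try {
            std::filesystem::create_hard_link(target, link);
        } catch (std::filesystem::filesystem_error & e) {
            if (e.code() == std::errc::file_exists) {
                // Another process created the link first: fine.
            } else if (e.code() == std::errc::too_many_links) {
                // Hard-link limit reached: skip deduplication for this file.
            } else
                throw;
        }
    }
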
*/ @@ -236,16 +235,16 @@ void LocalStore::optimisePath_(Activity * act, OptimiseStats & stats, printInfo("'%1%' has maximum number of links", linkPath); return; } - throw SysError("cannot link '%1%' to '%2%'", tempLink, linkPath); + throw; } /* Atomically replace the old file with the new hard link. */ try { - renameFile(tempLink, path); - } catch (SystemError & e) { - if (unlink(tempLink.c_str()) == -1) + std::filesystem::rename(tempLink, path); + } catch (std::filesystem::filesystem_error & e) { + std::filesystem::remove(tempLink); printError("unable to unlink '%1%'", tempLink); - if (errno == EMLINK) { + if (e.code() == std::errc::too_many_links) { /* Some filesystems generate too many links on the rename, rather than on the original link. (Probably it temporarily increases the st_nlink field before @@ -258,10 +257,13 @@ void LocalStore::optimisePath_(Activity * act, OptimiseStats & stats, stats.filesLinked++; stats.bytesFreed += st.st_size; - stats.blocksFreed += st.st_blocks; if (act) - act->result(resFileLinked, st.st_size, st.st_blocks); + act->result(resFileLinked, st.st_size +#ifndef _WIN32 + , st.st_blocks +#endif + ); } diff --git a/src/libstore/pathlocks.hh b/src/libstore/pathlocks.hh index b97fbecb923d..42a84a1a37bb 100644 --- a/src/libstore/pathlocks.hh +++ b/src/libstore/pathlocks.hh @@ -5,10 +5,26 @@ namespace nix { +/** + * Open (possibly create) a lock file and return the file descriptor. + * -1 is returned if create is false and the lock could not be opened + * because it doesn't exist. Any other error throws an exception. + */ +AutoCloseFD openLockFile(const Path & path, bool create); + +/** + * Delete an open lock file. + */ +void deleteLockFile(const Path & path, Descriptor desc); + +enum LockType { ltRead, ltWrite, ltNone }; + +bool lockFile(Descriptor desc, LockType lockType, bool wait); + class PathLocks { private: - typedef std::pair FDPair; + typedef std::pair FDPair; std::list fds; bool deletePaths; @@ -24,6 +40,18 @@ public: void setDeletion(bool deletePaths); }; -} +struct FdLock +{ + Descriptor desc; + bool acquired = false; + + FdLock(Descriptor desc, LockType lockType, bool wait, std::string_view waitMsg); -#include "pathlocks-impl.hh" + ~FdLock() + { + if (acquired) + lockFile(desc, ltNone, false); + } +}; + +} diff --git a/src/libstore/unix/posix-fs-canonicalise.cc b/src/libstore/posix-fs-canonicalise.cc similarity index 88% rename from src/libstore/unix/posix-fs-canonicalise.cc rename to src/libstore/posix-fs-canonicalise.cc index 8b29e90d48f5..d8bae13f5f90 100644 --- a/src/libstore/unix/posix-fs-canonicalise.cc +++ b/src/libstore/posix-fs-canonicalise.cc @@ -31,6 +31,7 @@ static void canonicaliseTimestampAndPermissions(const Path & path, const struct } +#ifndef _WIN32 // TODO implement if (st.st_mtime != mtimeStore) { struct timeval times[2]; times[0].tv_sec = st.st_atime; @@ -46,6 +47,7 @@ static void canonicaliseTimestampAndPermissions(const Path & path, const struct #endif throw SysError("changing modification time of '%1%'", path); } +#endif } @@ -57,7 +59,9 @@ void canonicaliseTimestampAndPermissions(const Path & path) static void canonicalisePathMetaData_( const Path & path, +#ifndef _WIN32 std::optional> uidRange, +#endif InodesSeen & inodesSeen) { checkInterrupt(); @@ -99,6 +103,7 @@ static void canonicalisePathMetaData_( } #endif +#ifndef _WIN32 /* Fail if the file is not owned by the build user. This prevents us from messing up the ownership/permissions of files hard-linked into the output (e.g. "ln /etc/shadow $out/foo"). 
@@ -112,11 +117,13 @@ static void canonicalisePathMetaData_( assert(S_ISLNK(st.st_mode) || (st.st_uid == geteuid() && (mode == 0444 || mode == 0555) && st.st_mtime == mtimeStore)); return; } +#endif inodesSeen.insert(Inode(st.st_dev, st.st_ino)); canonicaliseTimestampAndPermissions(path, st); +#ifndef _WIN32 /* Change ownership to the current uid. If it's a symlink, use lchown if available, otherwise don't bother. Wrong ownership of a symlink doesn't matter, since the owning user can't change @@ -134,22 +141,35 @@ static void canonicalisePathMetaData_( throw SysError("changing owner of '%1%' to %2%", path, geteuid()); } +#endif if (S_ISDIR(st.st_mode)) { - DirEntries entries = readDirectory(path); - for (auto & i : entries) - canonicalisePathMetaData_(path + "/" + i.name, uidRange, inodesSeen); + for (auto & i : std::filesystem::directory_iterator{path}) + canonicalisePathMetaData_( + i.path().string(), +#ifndef _WIN32 + uidRange, +#endif + inodesSeen); } } void canonicalisePathMetaData( const Path & path, +#ifndef _WIN32 std::optional> uidRange, +#endif InodesSeen & inodesSeen) { - canonicalisePathMetaData_(path, uidRange, inodesSeen); + canonicalisePathMetaData_( + path, +#ifndef _WIN32 + uidRange, +#endif + inodesSeen); +#ifndef _WIN32 /* On platforms that don't have lchown(), the top-level path can't be a symlink, since we can't change its ownership. */ auto st = lstat(path); @@ -158,14 +178,23 @@ void canonicalisePathMetaData( assert(S_ISLNK(st.st_mode)); throw Error("wrong ownership of top-level store path '%1%'", path); } +#endif } -void canonicalisePathMetaData(const Path & path, - std::optional> uidRange) +void canonicalisePathMetaData(const Path & path +#ifndef _WIN32 + , std::optional> uidRange +#endif + ) { InodesSeen inodesSeen; - canonicalisePathMetaData(path, uidRange, inodesSeen); + canonicalisePathMetaData_( + path, +#ifndef _WIN32 + uidRange, +#endif + inodesSeen); } } diff --git a/src/libstore/unix/posix-fs-canonicalise.hh b/src/libstore/posix-fs-canonicalise.hh similarity index 85% rename from src/libstore/unix/posix-fs-canonicalise.hh rename to src/libstore/posix-fs-canonicalise.hh index 35644af125fb..45a4f3f20694 100644 --- a/src/libstore/unix/posix-fs-canonicalise.hh +++ b/src/libstore/posix-fs-canonicalise.hh @@ -24,7 +24,7 @@ typedef std::set InodesSeen; * without execute permission; setuid bits etc. are cleared) * * - the owner and group are set to the Nix user and group, if we're - * running as root. + * running as root. (Unix only.) 
* * If uidRange is not empty, this function will throw an error if it * encounters files owned by a user outside of the closed interval @@ -32,11 +32,17 @@ typedef std::set InodesSeen; */ void canonicalisePathMetaData( const Path & path, +#ifndef _WIN32 std::optional> uidRange, +#endif InodesSeen & inodesSeen); + void canonicalisePathMetaData( - const Path & path, - std::optional> uidRange); + const Path & path +#ifndef _WIN32 + , std::optional> uidRange = std::nullopt +#endif + ); void canonicaliseTimestampAndPermissions(const Path & path); diff --git a/src/libstore/profiles.cc b/src/libstore/profiles.cc index 73d3976f43ae..d0da96262136 100644 --- a/src/libstore/profiles.cc +++ b/src/libstore/profiles.cc @@ -34,12 +34,12 @@ std::pair> findGenerations(Path pro { Generations gens; - Path profileDir = dirOf(profile); + std::filesystem::path profileDir = dirOf(profile); auto profileName = std::string(baseNameOf(profile)); - for (auto & i : readDirectory(profileDir)) { - if (auto n = parseName(profileName, i.name)) { - auto path = profileDir + "/" + i.name; + for (auto & i : std::filesystem::directory_iterator{profileDir}) { + if (auto n = parseName(profileName, i.path().filename().string())) { + auto path = i.path().string(); gens.push_back({ .number = *n, .path = path, @@ -338,6 +338,8 @@ Path getDefaultProfile() return absPath(readLink(profileLink), dirOf(profileLink)); } catch (Error &) { return profileLink; + } catch (std::filesystem::filesystem_error &) { + return profileLink; } } diff --git a/src/libstore/unix/schema.sql b/src/libstore/schema.sql similarity index 100% rename from src/libstore/unix/schema.sql rename to src/libstore/schema.sql diff --git a/src/libstore/ssh-store.cc b/src/libstore/ssh-store.cc index 0cf92b114c70..220d5d31b7a7 100644 --- a/src/libstore/ssh-store.cc +++ b/src/libstore/ssh-store.cc @@ -108,7 +108,7 @@ struct MountedSSHStoreConfig : virtual SSHStoreConfig, virtual LocalFSStoreConfi { } - const std::string name() override { return "Experimental SSH Store with filesytem mounted"; } + const std::string name() override { return "Experimental SSH Store with filesystem mounted"; } std::string doc() override { diff --git a/src/libstore/ssh.cc b/src/libstore/ssh.cc index 04f45827943c..7e730299ab7e 100644 --- a/src/libstore/ssh.cc +++ b/src/libstore/ssh.cc @@ -31,11 +31,11 @@ void SSHMaster::addCommonSSHOpts(Strings & args) if (!keyFile.empty()) args.insert(args.end(), {"-i", keyFile}); if (!sshPublicHostKey.empty()) { - Path fileName = (Path) *state->tmpDir + "/host-key"; + std::filesystem::path fileName = state->tmpDir->path() / "host-key"; auto p = host.rfind("@"); std::string thost = p != std::string::npos ? 
std::string(host, p + 1) : host; - writeFile(fileName, thost + " " + base64Decode(sshPublicHostKey) + "\n"); - args.insert(args.end(), {"-oUserKnownHostsFile=" + fileName}); + writeFile(fileName.string(), thost + " " + base64Decode(sshPublicHostKey) + "\n"); + args.insert(args.end(), {"-oUserKnownHostsFile=" + fileName.string()}); } if (compress) args.push_back("-C"); diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc index 118e5de9f99e..419c55e92395 100644 --- a/src/libstore/store-api.cc +++ b/src/libstore/store-api.cc @@ -20,10 +20,6 @@ #include "signals.hh" #include "users.hh" -#ifndef _WIN32 -# include "remote-store.hh" -#endif - #include #include @@ -54,7 +50,7 @@ Path Store::followLinksToStore(std::string_view _path) const { Path path = absPath(std::string(_path)); while (!isInStore(path)) { - if (!isLink(path)) break; + if (!std::filesystem::is_symlink(path)) break; auto target = readLink(path); path = absPath(target, dirOf(path)); } @@ -167,14 +163,13 @@ StorePath StoreDirConfig::makeFixedOutputPathFromCA(std::string_view name, const std::pair StoreDirConfig::computeStorePath( std::string_view name, - SourceAccessor & accessor, - const CanonPath & path, + const SourcePath & path, ContentAddressMethod method, HashAlgorithm hashAlgo, const StorePathSet & references, PathFilter & filter) const { - auto h = hashPath(accessor, path, method.getFileIngestionMethod(), hashAlgo, filter); + auto h = hashPath(path, method.getFileIngestionMethod(), hashAlgo, filter); return { makeFixedOutputPathFromCA( name, @@ -192,8 +187,7 @@ std::pair StoreDirConfig::computeStorePath( StorePath Store::addToStore( std::string_view name, - SourceAccessor & accessor, - const CanonPath & path, + const SourcePath & path, ContentAddressMethod method, HashAlgorithm hashAlgo, const StorePathSet & references, @@ -214,7 +208,7 @@ StorePath Store::addToStore( break; } auto source = sinkToSource([&](Sink & sink) { - dumpPath(accessor, path, sink, fsm, filter); + dumpPath(path, sink, fsm, filter); }); return addToStoreFromDump(*source, name, fsm, method, hashAlgo, references, repair); } @@ -343,8 +337,7 @@ digraph graphname { */ ValidPathInfo Store::addToStoreSlow( std::string_view name, - SourceAccessor & accessor, - const CanonPath & srcPath, + const SourcePath & srcPath, ContentAddressMethod method, HashAlgorithm hashAlgo, const StorePathSet & references, std::optional expectedCAHash) @@ -366,7 +359,7 @@ ValidPathInfo Store::addToStoreSlow( srcPath. The fact that we use scratchpadSink as a temporary buffer here is an implementation detail. */ auto fileSource = sinkToSource([&](Sink & scratchpadSink) { - accessor.dumpPath(srcPath, scratchpadSink); + srcPath.dumpPath(scratchpadSink); }); /* tapped provides the same data as fileSource, but we also write all the @@ -389,13 +382,12 @@ ValidPathInfo Store::addToStoreSlow( auto hash = method == FileIngestionMethod::Recursive && hashAlgo == HashAlgorithm::SHA256 ? narHash : method == FileIngestionMethod::Git - ? git::dumpHash(hashAlgo, accessor, srcPath).hash + ? 
git::dumpHash(hashAlgo, srcPath).hash : caHashSink.finish().first; if (expectedCAHash && expectedCAHash != hash) throw Error("hash mismatch for '%s'", srcPath); - ValidPathInfo info { *this, name, @@ -412,7 +404,7 @@ ValidPathInfo Store::addToStoreSlow( if (!isValidPath(info.path)) { auto source = sinkToSource([&](Sink & scratchpadSink) { - accessor.dumpPath(srcPath, scratchpadSink); + srcPath.dumpPath(scratchpadSink); }); addToStore(info, *source); } @@ -1269,10 +1261,9 @@ Derivation Store::readInvalidDerivation(const StorePath & drvPath) } -#ifndef _WIN32 -# include "local-store.hh" -# include "uds-remote-store.hh" -#endif + +#include "local-store.hh" +#include "uds-remote-store.hh" namespace nix { @@ -1290,9 +1281,6 @@ std::pair splitUriAndParams(const std::string & uri_ return {uri, params}; } -#ifdef _WIN32 // Unused on Windows because the next `#ifndef` -[[maybe_unused]] -#endif static bool isNonUriPath(const std::string & spec) { return @@ -1307,7 +1295,6 @@ std::shared_ptr openFromNonUri(const std::string & uri, const Store::Para { // TODO reenable on Windows once we have `LocalStore` and // `UDSRemoteStore`. - #ifndef _WIN32 if (uri == "" || uri == "auto") { auto stateDir = getOr(params, "state", settings.nixStateDir); if (access(stateDir.c_str(), R_OK | W_OK) == 0) @@ -1352,9 +1339,6 @@ std::shared_ptr openFromNonUri(const std::string & uri, const Store::Para } else { return nullptr; } - #else - return nullptr; - #endif } // The `parseURL` function supports both IPv6 URIs as defined in diff --git a/src/libstore/store-api.hh b/src/libstore/store-api.hh index 5f683a21139f..ae8c224374fa 100644 --- a/src/libstore/store-api.hh +++ b/src/libstore/store-api.hh @@ -439,8 +439,7 @@ public: */ virtual StorePath addToStore( std::string_view name, - SourceAccessor & accessor, - const CanonPath & path, + const SourcePath & path, ContentAddressMethod method = FileIngestionMethod::Recursive, HashAlgorithm hashAlgo = HashAlgorithm::SHA256, const StorePathSet & references = StorePathSet(), @@ -454,8 +453,7 @@ public: */ ValidPathInfo addToStoreSlow( std::string_view name, - SourceAccessor & accessor, - const CanonPath & path, + const SourcePath & path, ContentAddressMethod method = FileIngestionMethod::Recursive, HashAlgorithm hashAlgo = HashAlgorithm::SHA256, const StorePathSet & references = StorePathSet(), diff --git a/src/libstore/store-dir-config.hh b/src/libstore/store-dir-config.hh index 7ca8c2665c38..643f8854dd03 100644 --- a/src/libstore/store-dir-config.hh +++ b/src/libstore/store-dir-config.hh @@ -13,6 +13,8 @@ namespace nix { +struct SourcePath; + MakeError(BadStorePath, Error); struct StoreDirConfig : public Config @@ -94,8 +96,7 @@ struct StoreDirConfig : public Config */ std::pair computeStorePath( std::string_view name, - SourceAccessor & accessor, - const CanonPath & path, + const SourcePath & path, ContentAddressMethod method = FileIngestionMethod::Recursive, HashAlgorithm hashAlgo = HashAlgorithm::SHA256, const StorePathSet & references = {}, diff --git a/src/libstore/unix/uds-remote-store.cc b/src/libstore/uds-remote-store.cc similarity index 86% rename from src/libstore/unix/uds-remote-store.cc rename to src/libstore/uds-remote-store.cc index 226cdf7175c6..649644146bf0 100644 --- a/src/libstore/unix/uds-remote-store.cc +++ b/src/libstore/uds-remote-store.cc @@ -2,16 +2,20 @@ #include "unix-domain-socket.hh" #include "worker-protocol.hh" +#include #include #include -#include -#include #include #include #include -#include - +#ifdef _WIN32 +# include +# include +#else +# include 
+# include +#endif namespace nix { @@ -57,7 +61,7 @@ std::string UDSRemoteStore::getUri() void UDSRemoteStore::Connection::closeWrite() { - shutdown(fd.get(), SHUT_WR); + shutdown(toSocket(fd.get()), SHUT_WR); } @@ -68,7 +72,7 @@ ref UDSRemoteStore::openConnection() /* Connect to a daemon that does the privileged work for us. */ conn->fd = createUnixDomainSocket(); - nix::connect(conn->fd.get(), path ? *path : settings.nixDaemonSocketFile); + nix::connect(toSocket(conn->fd.get()), path ? *path : settings.nixDaemonSocketFile); conn->from.fd = conn->fd.get(); conn->to.fd = conn->fd.get(); diff --git a/src/libstore/unix/uds-remote-store.hh b/src/libstore/uds-remote-store.hh similarity index 100% rename from src/libstore/unix/uds-remote-store.hh rename to src/libstore/uds-remote-store.hh diff --git a/src/libstore/unix/uds-remote-store.md b/src/libstore/uds-remote-store.md similarity index 100% rename from src/libstore/unix/uds-remote-store.md rename to src/libstore/uds-remote-store.md diff --git a/src/libstore/unix/build/derivation-goal.cc b/src/libstore/unix/build/derivation-goal.cc index 4d43429965df..89518b055536 100644 --- a/src/libstore/unix/build/derivation-goal.cc +++ b/src/libstore/unix/build/derivation-goal.cc @@ -30,32 +30,6 @@ #include #include -#if HAVE_STATVFS -#include -#endif - -/* Includes required for chroot support. */ -#if __linux__ -#include -#include -#include -#include -#include -#include -#include -#include -#include -#if HAVE_SECCOMP -#include -#endif -#define pivot_root(new_root, put_old) (syscall(SYS_pivot_root, new_root, put_old)) -#endif - -#if __APPLE__ -#include -#include -#endif - #include #include @@ -809,7 +783,7 @@ static void movePath(const Path & src, const Path & dst) if (changePerm) chmod_(src, st.st_mode | S_IWUSR); - renameFile(src, dst); + std::filesystem::rename(src, dst); if (changePerm) chmod_(dst, st.st_mode); diff --git a/src/libstore/unix/build/local-derivation-goal.cc b/src/libstore/unix/build/local-derivation-goal.cc index aad5173e7af5..16095cf5d496 100644 --- a/src/libstore/unix/build/local-derivation-goal.cc +++ b/src/libstore/unix/build/local-derivation-goal.cc @@ -77,7 +77,7 @@ void handleDiffHook( try { auto diffRes = runProgram(RunOptions { .program = diffHook, - .searchPath = true, + .lookupPath = true, .args = {tryA, tryB, drvPath, tmpDir}, .uid = uid, .gid = gid, @@ -177,6 +177,10 @@ void LocalDerivationGoal::killSandbox(bool getStats) void LocalDerivationGoal::tryLocalBuild() { +#if __APPLE__ + additionalSandboxProfile = parsedDrv->getStringAttr("__sandboxProfile").value_or(""); +#endif + unsigned int curBuilds = worker.getNrLocalBuilds(); if (curBuilds >= settings.maxBuildJobs) { state = &DerivationGoal::tryToBuild; @@ -281,7 +285,7 @@ static void movePath(const Path & src, const Path & dst) if (changePerm) chmod_(src, st.st_mode | S_IWUSR); - renameFile(src, dst); + std::filesystem::rename(src, dst); if (changePerm) chmod_(dst, st.st_mode); @@ -368,7 +372,7 @@ bool LocalDerivationGoal::cleanupDecideWhetherDiskFull() if (buildMode != bmCheck && status.known->isValid()) continue; auto p = worker.store.toRealPath(status.known->path); if (pathExists(chrootRootDir + p)) - renameFile((chrootRootDir + p), p); + std::filesystem::rename((chrootRootDir + p), p); } return diskFull; @@ -417,7 +421,9 @@ static void doBind(const Path & source, const Path & target, bool optional = fal } else if (S_ISLNK(st.st_mode)) { // Symlinks can (apparently) not be bind-mounted, so just copy it createDirs(dirOf(target)); - copyFile(source, target, /* andDelete 
*/ false); + copyFile( + std::filesystem::path(source), + std::filesystem::path(target), false); } else { createDirs(dirOf(target)); writeFile(target, ""); @@ -495,10 +501,6 @@ void LocalDerivationGoal::startBuilder() settings.thisSystem, concatStringsSep(", ", worker.store.systemFeatures)); -#if __APPLE__ - additionalSandboxProfile = parsedDrv->getStringAttr("__sandboxProfile").value_or(""); -#endif - /* Create a temporary directory where the build will take place. */ tmpDir = createTempDir(settings.buildDir.get().value_or(""), "nix-build-" + std::string(drvPath.name()), false, false, 0700); @@ -1306,8 +1308,7 @@ struct RestrictedStore : public virtual RestrictedStoreConfig, public virtual In StorePath addToStore( std::string_view name, - SourceAccessor & accessor, - const CanonPath & srcPath, + const SourcePath & srcPath, ContentAddressMethod method, HashAlgorithm hashAlgo, const StorePathSet & references, @@ -2485,7 +2486,6 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs() /* FIXME optimize and deduplicate with addToStore */ std::string oldHashPart { scratchPath->hashPart() }; auto got = [&]{ - PosixSourceAccessor accessor; auto fim = outputHash.method.getFileIngestionMethod(); switch (fim) { case FileIngestionMethod::Flat: @@ -2494,15 +2494,15 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs() HashModuloSink caSink { outputHash.hashAlgo, oldHashPart }; auto fim = outputHash.method.getFileIngestionMethod(); dumpPath( - accessor, CanonPath { actualPath }, + {getFSSourceAccessor(), CanonPath(actualPath)}, caSink, (FileSerialisationMethod) fim); return caSink.finish().first; } case FileIngestionMethod::Git: { return git::dumpHash( - outputHash.hashAlgo, accessor, - CanonPath { tmpDir + "/tmp" }).hash; + outputHash.hashAlgo, + {getFSSourceAccessor(), CanonPath(tmpDir + "/tmp")}).hash; } } assert(false); @@ -2529,9 +2529,8 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs() } { - PosixSourceAccessor accessor; HashResult narHashAndSize = hashPath( - accessor, CanonPath { actualPath }, + {getFSSourceAccessor(), CanonPath(actualPath)}, FileSerialisationMethod::Recursive, HashAlgorithm::SHA256); newInfo0.narHash = narHashAndSize.first; newInfo0.narSize = narHashAndSize.second; @@ -2553,9 +2552,8 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs() std::string { scratchPath->hashPart() }, std::string { requiredFinalPath.hashPart() }); rewriteOutput(outputRewrites); - PosixSourceAccessor accessor; HashResult narHashAndSize = hashPath( - accessor, CanonPath { actualPath }, + {getFSSourceAccessor(), CanonPath(actualPath)}, FileSerialisationMethod::Recursive, HashAlgorithm::SHA256); ValidPathInfo newInfo0 { requiredFinalPath, narHashAndSize.first }; newInfo0.narSize = narHashAndSize.second; @@ -2572,8 +2570,11 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs() // Replace the output by a fresh copy of itself to make sure // that there's no stale file descriptor pointing to it Path tmpOutput = actualPath + ".tmp"; - copyFile(actualPath, tmpOutput, true); - renameFile(tmpOutput, actualPath); + copyFile( + std::filesystem::path(actualPath), + std::filesystem::path(tmpOutput), true); + + std::filesystem::rename(tmpOutput, actualPath); auto newInfo0 = newInfoFromCA(DerivationOutput::CAFloating { .method = dof.ca.method, diff --git a/src/libstore/unix/build/worker.cc b/src/libstore/unix/build/worker.cc index 815ded3d5b01..03fc280a47d4 100644 --- a/src/libstore/unix/build/worker.cc +++ b/src/libstore/unix/build/worker.cc @@ -530,7 +530,7 @@ bool 
Worker::pathContentsGood(const StorePath & path) res = false; else { Hash current = hashPath( - *store.getFSAccessor(), CanonPath { store.printStorePath(path) }, + {store.getFSAccessor(), CanonPath(store.printStorePath(path))}, FileIngestionMethod::Recursive, info->narHash.algo); Hash nullHash(HashAlgorithm::SHA256); res = info->narHash == nullHash || info->narHash == current; diff --git a/src/libstore/unix/builtins/unpack-channel.cc b/src/libstore/unix/builtins/unpack-channel.cc index 6f68d4c0bc72..a5f2b8e3adfb 100644 --- a/src/libstore/unix/builtins/unpack-channel.cc +++ b/src/libstore/unix/builtins/unpack-channel.cc @@ -21,10 +21,13 @@ void builtinUnpackChannel( unpackTarfile(src, out); - auto entries = readDirectory(out); - if (entries.size() != 1) + auto entries = std::filesystem::directory_iterator{out}; + auto fileName = entries->path().string(); + auto fileCount = std::distance(std::filesystem::begin(entries), std::filesystem::end(entries)); + + if (fileCount != 1) throw Error("channel tarball '%s' contains more than one file", src); - renameFile((out + "/" + entries[0].name), (out + "/" + channelName)); + std::filesystem::rename(fileName, (out + "/" + channelName)); } } diff --git a/src/libstore/unix/lock.cc b/src/libstore/unix/lock.cc index fd7af171fca5..023c74e349bf 100644 --- a/src/libstore/unix/lock.cc +++ b/src/libstore/unix/lock.cc @@ -9,8 +9,6 @@ namespace nix { -using namespace nix::unix; - #if __linux__ static std::vector get_group_list(const char *username, gid_t group_id) diff --git a/src/libstore/unix/pathlocks-impl.hh b/src/libstore/unix/pathlocks-impl.hh deleted file mode 100644 index 31fe968bbb16..000000000000 --- a/src/libstore/unix/pathlocks-impl.hh +++ /dev/null @@ -1,38 +0,0 @@ -#pragma once -///@file - -#include "file-descriptor.hh" - -namespace nix::unix { - -/** - * Open (possibly create) a lock file and return the file descriptor. - * -1 is returned if create is false and the lock could not be opened - * because it doesn't exist. Any other error throws an exception. - */ -AutoCloseFD openLockFile(const Path & path, bool create); - -/** - * Delete an open lock file. - */ -void deleteLockFile(const Path & path, int fd); - -enum LockType { ltRead, ltWrite, ltNone }; - -bool lockFile(int fd, LockType lockType, bool wait); - -struct FdLock -{ - int fd; - bool acquired = false; - - FdLock(int fd, LockType lockType, bool wait, std::string_view waitMsg); - - ~FdLock() - { - if (acquired) - lockFile(fd, ltNone, false); - } -}; - -} diff --git a/src/libstore/unix/pathlocks.cc b/src/libstore/unix/pathlocks.cc index 32c1b9ff44a7..af21319a758f 100644 --- a/src/libstore/unix/pathlocks.cc +++ b/src/libstore/unix/pathlocks.cc @@ -14,9 +14,7 @@ namespace nix { -using namespace nix::unix; - -AutoCloseFD unix::openLockFile(const Path & path, bool create) +AutoCloseFD openLockFile(const Path & path, bool create) { AutoCloseFD fd; @@ -28,20 +26,20 @@ AutoCloseFD unix::openLockFile(const Path & path, bool create) } -void unix::deleteLockFile(const Path & path, int fd) +void deleteLockFile(const Path & path, Descriptor desc) { /* Get rid of the lock file. Have to be careful not to introduce races. Write a (meaningless) token to the file to indicate to other processes waiting on this lock that the lock is stale (deleted). */ unlink(path.c_str()); - writeFull(fd, "d"); + writeFull(desc, "d"); /* Note that the result of unlink() is ignored; removing the lock file is an optimisation, not a necessity. 
*/ } -bool unix::lockFile(int fd, LockType lockType, bool wait) +bool lockFile(Descriptor desc, LockType lockType, bool wait) { int type; if (lockType == ltRead) type = LOCK_SH; @@ -50,7 +48,7 @@ bool unix::lockFile(int fd, LockType lockType, bool wait) else abort(); if (wait) { - while (flock(fd, type) != 0) { + while (flock(desc, type) != 0) { checkInterrupt(); if (errno != EINTR) throw SysError("acquiring/releasing lock"); @@ -58,7 +56,7 @@ bool unix::lockFile(int fd, LockType lockType, bool wait) return false; } } else { - while (flock(fd, type | LOCK_NB) != 0) { + while (flock(desc, type | LOCK_NB) != 0) { checkInterrupt(); if (errno == EWOULDBLOCK) return false; if (errno != EINTR) @@ -149,16 +147,16 @@ void PathLocks::unlock() } -FdLock::FdLock(int fd, LockType lockType, bool wait, std::string_view waitMsg) - : fd(fd) +FdLock::FdLock(Descriptor desc, LockType lockType, bool wait, std::string_view waitMsg) + : desc(desc) { if (wait) { - if (!lockFile(fd, lockType, false)) { + if (!lockFile(desc, lockType, false)) { printInfo("%s", waitMsg); - acquired = lockFile(fd, lockType, true); + acquired = lockFile(desc, lockType, true); } } else - acquired = lockFile(fd, lockType, false); + acquired = lockFile(desc, lockType, false); } diff --git a/src/libstore/windows/pathlocks-impl.hh b/src/libstore/windows/pathlocks-impl.hh deleted file mode 100644 index ba3ad28d9480..000000000000 --- a/src/libstore/windows/pathlocks-impl.hh +++ /dev/null @@ -1,2 +0,0 @@ -#pragma once -///@file Needed because Unix-specific counterpart diff --git a/src/libstore/windows/pathlocks.cc b/src/libstore/windows/pathlocks.cc index ab4294c2ab07..738057f68b83 100644 --- a/src/libstore/windows/pathlocks.cc +++ b/src/libstore/windows/pathlocks.cc @@ -1,16 +1,149 @@ #include "logging.hh" #include "pathlocks.hh" +#include "signals.hh" +#include "util.hh" +#include +#include +#include +#include "windows-error.hh" namespace nix { -bool PathLocks::lockPaths(const PathSet & _paths, const std::string & waitMsg, bool wait) +void deleteLockFile(const Path & path, Descriptor desc) { - return true; + + int exit = DeleteFileA(path.c_str()); + if (exit == 0) + warn("%s: %s", path, std::to_string(GetLastError())); } void PathLocks::unlock() { - warn("PathLocks::unlock: not yet implemented"); + for (auto & i : fds) { + if (deletePaths) + deleteLockFile(i.second, i.first); + + if (CloseHandle(i.first) == -1) + printError("error (ignored): cannot close lock file on '%1%'", i.second); + + debug("lock released on '%1%'", i.second); + } + + fds.clear(); +} + +AutoCloseFD openLockFile(const Path & path, bool create) +{ + AutoCloseFD desc = CreateFileA( + path.c_str(), GENERIC_READ | GENERIC_WRITE, FILE_SHARE_READ | FILE_SHARE_WRITE, NULL, + create ? OPEN_ALWAYS : OPEN_EXISTING, FILE_ATTRIBUTE_NORMAL | FILE_FLAG_POSIX_SEMANTICS, NULL); + if (desc.get() == INVALID_HANDLE_VALUE) + warn("%s: %s", path, std::to_string(GetLastError())); + + return desc; +} + +bool lockFile(Descriptor desc, LockType lockType, bool wait) +{ + switch (lockType) { + case ltNone: { + OVERLAPPED ov = {0}; + if (!UnlockFileEx(desc, 0, 2, 0, &ov)) { + WinError winError("Failed to unlock file desc %s", desc); + throw winError; + } + return true; + } + case ltRead: { + OVERLAPPED ov = {0}; + if (!LockFileEx(desc, wait ? 
0 : LOCKFILE_FAIL_IMMEDIATELY, 0, 1, 0, &ov)) { + WinError winError("Failed to lock file desc %s", desc); + if (winError.lastError == ERROR_LOCK_VIOLATION) + return false; + throw winError; + } + + ov.Offset = 1; + if (!UnlockFileEx(desc, 0, 1, 0, &ov)) { + WinError winError("Failed to unlock file desc %s", desc); + if (winError.lastError != ERROR_NOT_LOCKED) + throw winError; + } + return true; + } + case ltWrite: { + OVERLAPPED ov = {0}; + ov.Offset = 1; + if (!LockFileEx(desc, LOCKFILE_EXCLUSIVE_LOCK | (wait ? 0 : LOCKFILE_FAIL_IMMEDIATELY), 0, 1, 0, &ov)) { + WinError winError("Failed to lock file desc %s", desc); + if (winError.lastError == ERROR_LOCK_VIOLATION) + return false; + throw winError; + } + + ov.Offset = 0; + if (!UnlockFileEx(desc, 0, 1, 0, &ov)) { + WinError winError("Failed to unlock file desc %s", desc); + if (winError.lastError != ERROR_NOT_LOCKED) + throw winError; + } + return true; + } + default: + assert(false); + } +} + +bool PathLocks::lockPaths(const PathSet & paths, const std::string & waitMsg, bool wait) +{ + assert(fds.empty()); + + for (auto & path : paths) { + checkInterrupt(); + Path lockPath = path + ".lock"; + debug("locking path '%1%'", path); + + AutoCloseFD fd; + + while (1) { + fd = openLockFile(lockPath, true); + if (!lockFile(fd.get(), ltWrite, false)) { + if (wait) { + if (waitMsg != "") + printError(waitMsg); + lockFile(fd.get(), ltWrite, true); + } else { + unlock(); + return false; + } + } + + debug("lock acquired on '%1%'", lockPath); + + struct _stat st; + if (_fstat(fromDescriptorReadOnly(fd.get()), &st) == -1) + throw SysError("statting lock file '%1%'", lockPath); + if (st.st_size != 0) + debug("open lock file '%1%' has become stale", lockPath); + else + break; + } + + fds.push_back(FDPair(fd.release(), lockPath)); + } + return true; +} + +FdLock::FdLock(Descriptor desc, LockType lockType, bool wait, std::string_view waitMsg) + : desc(desc) +{ + if (wait) { + if (!lockFile(desc, lockType, false)) { + printInfo("%s", waitMsg); + acquired = lockFile(desc, lockType, true); + } + } else + acquired = lockFile(desc, lockType, false); } } diff --git a/src/libutil/archive.cc b/src/libutil/archive.cc index 351ee094b880..04f777d00f1e 100644 --- a/src/libutil/archive.cc +++ b/src/libutil/archive.cc @@ -8,6 +8,7 @@ #include "archive.hh" #include "config.hh" #include "posix-source-accessor.hh" +#include "source-path.hh" #include "file-system.hh" #include "signals.hh" @@ -110,9 +111,9 @@ void SourceAccessor::dumpPath( time_t dumpPathAndGetMtime(const Path & path, Sink & sink, PathFilter & filter) { - auto [accessor, canonPath] = PosixSourceAccessor::createAtRoot(path); - accessor.dumpPath(canonPath, sink, filter); - return accessor.mtime; + auto path2 = PosixSourceAccessor::createAtRoot(path); + path2.dumpPath(sink, filter); + return path2.accessor.dynamic_pointer_cast()->mtime; } void dumpPath(const Path & path, Sink & sink, PathFilter & filter) diff --git a/src/libutil/args.hh b/src/libutil/args.hh index 4b2e1d96055a..7759b74a93cc 100644 --- a/src/libutil/args.hh +++ b/src/libutil/args.hh @@ -41,7 +41,7 @@ public: virtual std::string doc() { return ""; } /** - * @brief Get the base directory for the command. + * @brief Get the [base directory](https://nixos.org/manual/nix/unstable/glossary#gloss-base-directory) for the command. * * @return Generally the working directory, but in case of a shebang * interpreter, returns the directory of the script. 
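The hunks above (archive.cc and the earlier store changes) migrate call sites from the old (SourceAccessor &, CanonPath) pair to the new SourcePath-based overloads of dumpPath() and hashPath(). A minimal usage sketch follows; it is illustrative only and not part of the patch: the helper name is invented, while the signatures are the ones declared in posix-source-accessor.hh and file-content-address.hh elsewhere in this diff.

#include "posix-source-accessor.hh"
#include "file-content-address.hh"

// Hash the NAR serialisation of an arbitrary filesystem path, roughly the way
// LocalStore::optimisePath_() does after this change.
Hash hashNarOfPath(const Path & p)
{
    // createAtRoot() now returns a SourcePath (accessor + CanonPath) instead of
    // a std::pair of PosixSourceAccessor and CanonPath.
    auto src = PosixSourceAccessor::createAtRoot(p);
    // hashPath() takes the SourcePath directly; the PathFilter argument
    // defaults to defaultPathFilter.
    return hashPath(src, FileSerialisationMethod::Recursive, HashAlgorithm::SHA256).first;
}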
diff --git a/src/libutil/config-impl.hh b/src/libutil/config-impl.hh index 1da0cb6389ed..1d349fab5db6 100644 --- a/src/libutil/config-impl.hh +++ b/src/libutil/config-impl.hh @@ -116,10 +116,11 @@ T BaseSetting::parse(const std::string & str) const { static_assert(std::is_integral::value, "Integer required."); - if (auto n = string2Int(str)) - return *n; - else + try { + return string2IntWithUnitPrefix(str); + } catch (...) { throw UsageError("setting '%s' has invalid value '%s'", name, str); + } } template diff --git a/src/libutil/experimental-features.cc b/src/libutil/experimental-features.cc index 1e7469cad9d8..9b7000f9f3e7 100644 --- a/src/libutil/experimental-features.cc +++ b/src/libutil/experimental-features.cc @@ -282,7 +282,7 @@ constexpr std::array xpFeatureDetails .tag = Xp::MountedSSHStore, .name = "mounted-ssh-store", .description = R"( - Allow the use of the [`mounted SSH store`](@docroot@/command-ref/new-cli/nix3-help-stores.html#experimental-ssh-store-with-filesytem-mounted). + Allow the use of the [`mounted SSH store`](@docroot@/command-ref/new-cli/nix3-help-stores.html#experimental-ssh-store-with-filesystem-mounted). )", .trackingUrl = "https://github.com/NixOS/nix/milestone/43", }, diff --git a/src/libutil/file-content-address.cc b/src/libutil/file-content-address.cc index 570247b9e7b3..769042d00daf 100644 --- a/src/libutil/file-content-address.cc +++ b/src/libutil/file-content-address.cc @@ -1,6 +1,7 @@ #include "file-content-address.hh" #include "archive.hh" #include "git.hh" +#include "source-path.hh" namespace nix { @@ -68,17 +69,17 @@ std::string_view renderFileIngestionMethod(FileIngestionMethod method) void dumpPath( - SourceAccessor & accessor, const CanonPath & path, + const SourcePath & path, Sink & sink, FileSerialisationMethod method, PathFilter & filter) { switch (method) { case FileSerialisationMethod::Flat: - accessor.readFile(path, sink); + path.readFile(sink); break; case FileSerialisationMethod::Recursive: - accessor.dumpPath(path, sink, filter); + path.dumpPath(sink, filter); break; } } @@ -101,27 +102,27 @@ void restorePath( HashResult hashPath( - SourceAccessor & accessor, const CanonPath & path, + const SourcePath & path, FileSerialisationMethod method, HashAlgorithm ha, PathFilter & filter) { HashSink sink { ha }; - dumpPath(accessor, path, sink, method, filter); + dumpPath(path, sink, method, filter); return sink.finish(); } Hash hashPath( - SourceAccessor & accessor, const CanonPath & path, + const SourcePath & path, FileIngestionMethod method, HashAlgorithm ht, PathFilter & filter) { switch (method) { case FileIngestionMethod::Flat: case FileIngestionMethod::Recursive: - return hashPath(accessor, path, (FileSerialisationMethod) method, ht, filter).first; + return hashPath(path, (FileSerialisationMethod) method, ht, filter).first; case FileIngestionMethod::Git: - return git::dumpHash(ht, accessor, path, filter).hash; + return git::dumpHash(ht, path, filter).hash; } assert(false); } diff --git a/src/libutil/file-content-address.hh b/src/libutil/file-content-address.hh index b361ab243f6d..145a8fb1f468 100644 --- a/src/libutil/file-content-address.hh +++ b/src/libutil/file-content-address.hh @@ -7,6 +7,8 @@ namespace nix { +struct SourcePath; + /** * An enumeration of the ways we can serialize file system * objects. @@ -45,7 +47,7 @@ std::string_view renderFileSerialisationMethod(FileSerialisationMethod method); * Dump a serialization of the given file system object. 
*/ void dumpPath( - SourceAccessor & accessor, const CanonPath & path, + const SourcePath & path, Sink & sink, FileSerialisationMethod method, PathFilter & filter = defaultPathFilter); @@ -72,7 +74,7 @@ void restorePath( * ``` */ HashResult hashPath( - SourceAccessor & accessor, const CanonPath & path, + const SourcePath & path, FileSerialisationMethod method, HashAlgorithm ha, PathFilter & filter = defaultPathFilter); @@ -138,7 +140,7 @@ std::string_view renderFileIngestionMethod(FileIngestionMethod method); * useful defined for a merkle format. */ Hash hashPath( - SourceAccessor & accessor, const CanonPath & path, + const SourcePath & path, FileIngestionMethod method, HashAlgorithm ha, PathFilter & filter = defaultPathFilter); diff --git a/src/libutil/file-path.hh b/src/libutil/file-path.hh index 6fb1001250b2..6589c4060bbd 100644 --- a/src/libutil/file-path.hh +++ b/src/libutil/file-path.hh @@ -13,9 +13,8 @@ namespace nix { * * @todo drop `NG` suffix and replace the ones in `types.hh`. */ -typedef std::filesystem::path PathNG; -typedef std::list PathsNG; -typedef std::set PathSetNG; +typedef std::list PathsNG; +typedef std::set PathSetNG; /** * Stop gap until `std::filesystem::path_view` from P1030R6 exists in a @@ -23,18 +22,18 @@ typedef std::set PathSetNG; * * @todo drop `NG` suffix and replace the one in `types.hh`. */ -struct PathViewNG : std::basic_string_view +struct PathViewNG : std::basic_string_view { - using string_view = std::basic_string_view; + using string_view = std::basic_string_view; using string_view::string_view; - PathViewNG(const PathNG & path) - : std::basic_string_view(path.native()) + PathViewNG(const std::filesystem::path & path) + : std::basic_string_view(path.native()) { } - PathViewNG(const PathNG::string_type & path) - : std::basic_string_view(path) + PathViewNG(const std::filesystem::path::string_type & path) + : std::basic_string_view(path) { } const string_view & native() const { return *this; } @@ -43,10 +42,19 @@ struct PathViewNG : std::basic_string_view std::string os_string_to_string(PathViewNG::string_view path); -PathNG::string_type string_to_os_string(std::string_view s); +std::filesystem::path::string_type string_to_os_string(std::string_view s); -std::optional maybePathNG(PathView path); +std::optional maybePath(PathView path); -PathNG pathNG(PathView path); +std::filesystem::path pathNG(PathView path); + +/** + * Create string literals with the native character width of paths + */ +#ifndef _WIN32 +# define PATHNG_LITERAL(s) s +#else +# define PATHNG_LITERAL(s) L ## s +#endif } diff --git a/src/libutil/file-system.cc b/src/libutil/file-system.cc index b03bb767b6f1..919bf5d50bd2 100644 --- a/src/libutil/file-system.cc +++ b/src/libutil/file-system.cc @@ -94,7 +94,7 @@ Path canonPath(PathView path, bool resolveSymlinks) path, [&followCount, &temp, maxFollow, resolveSymlinks] (std::string & result, std::string_view & remaining) { - if (resolveSymlinks && isLink(result)) { + if (resolveSymlinks && fs::is_symlink(result)) { if (++followCount >= maxFollow) throw Error("infinite symlink recursion in path '%0%'", remaining); remaining = (temp = concatStrings(readLink(result), remaining)); @@ -120,10 +120,10 @@ Path canonPath(PathView path, bool resolveSymlinks) Path dirOf(const PathView path) { - Path::size_type pos = path.rfind('/'); + Path::size_type pos = NativePathTrait::rfindPathSep(path); if (pos == path.npos) return "."; - return pos == 0 ? 
"/" : Path(path, 0, pos); + return fs::path{path}.parent_path().string(); } @@ -217,72 +217,8 @@ bool pathAccessible(const Path & path) Path readLink(const Path & path) { -#ifndef _WIN32 checkInterrupt(); - std::vector buf; - for (ssize_t bufSize = PATH_MAX/4; true; bufSize += bufSize/2) { - buf.resize(bufSize); - ssize_t rlSize = readlink(path.c_str(), buf.data(), bufSize); - if (rlSize == -1) - if (errno == EINVAL) - throw Error("'%1%' is not a symlink", path); - else - throw SysError("reading symbolic link '%1%'", path); - else if (rlSize < bufSize) - return std::string(buf.data(), rlSize); - } -#else - // TODO modern Windows does in fact support symlinks - throw UnimplementedError("reading symbolic link '%1%'", path); -#endif -} - - -bool isLink(const Path & path) -{ - return getFileType(path) == DT_LNK; -} - - -DirEntries readDirectory(DIR *dir, const Path & path) -{ - DirEntries entries; - entries.reserve(64); - - struct dirent * dirent; - while (errno = 0, dirent = readdir(dir)) { /* sic */ - checkInterrupt(); - std::string name = dirent->d_name; - if (name == "." || name == "..") continue; - entries.emplace_back(name, dirent->d_ino, -#ifdef HAVE_STRUCT_DIRENT_D_TYPE - dirent->d_type -#else - DT_UNKNOWN -#endif - ); - } - if (errno) throw SysError("reading directory '%1%'", path); - - return entries; -} - -DirEntries readDirectory(const Path & path) -{ - AutoCloseDir dir(opendir(path.c_str())); - if (!dir) throw SysError("opening directory '%1%'", path); - - return readDirectory(dir.get(), path); -} - - -unsigned char getFileType(const Path & path) -{ - struct stat st = lstat(path); - if (S_ISDIR(st.st_mode)) return DT_DIR; - if (S_ISLNK(st.st_mode)) return DT_LNK; - if (S_ISREG(st.st_mode)) return DT_REG; - return DT_UNKNOWN; + return fs::read_symlink(path).string(); } @@ -380,12 +316,12 @@ void syncParent(const Path & path) } -static void _deletePath(Descriptor parentfd, const Path & path, uint64_t & bytesFreed) +static void _deletePath(Descriptor parentfd, const fs::path & path, uint64_t & bytesFreed) { #ifndef _WIN32 checkInterrupt(); - std::string name(baseNameOf(path)); + std::string name(baseNameOf(path.native())); struct stat st; if (fstatat(parentfd, name.c_str(), &st, @@ -432,8 +368,15 @@ static void _deletePath(Descriptor parentfd, const Path & path, uint64_t & bytes AutoCloseDir dir(fdopendir(fd)); if (!dir) throw SysError("opening directory '%1%'", path); - for (auto & i : readDirectory(dir.get(), path)) - _deletePath(dirfd(dir.get()), path + "/" + i.name, bytesFreed); + + struct dirent * dirent; + while (errno = 0, dirent = readdir(dir.get())) { /* sic */ + checkInterrupt(); + std::string childName = dirent->d_name; + if (childName == "." || childName == "..") continue; + _deletePath(dirfd(dir.get()), path + "/" + childName, bytesFreed); + } + if (errno) throw SysError("reading directory '%1%'", path); } int flags = S_ISDIR(st.st_mode) ? 
AT_REMOVEDIR : 0; @@ -447,9 +390,9 @@ static void _deletePath(Descriptor parentfd, const Path & path, uint64_t & bytes #endif } -static void _deletePath(const Path & path, uint64_t & bytesFreed) +static void _deletePath(const fs::path & path, uint64_t & bytesFreed) { - Path dir = dirOf(path); + Path dir = dirOf(path.string()); if (dir == "") dir = "/"; @@ -463,7 +406,7 @@ static void _deletePath(const Path & path, uint64_t & bytesFreed) } -void deletePath(const Path & path) +void deletePath(const fs::path & path) { uint64_t dummy; deletePath(path, dummy); @@ -497,7 +440,7 @@ Paths createDirs(const Path & path) } -void deletePath(const Path & path, uint64_t & bytesFreed) +void deletePath(const fs::path & path, uint64_t & bytesFreed) { //Activity act(*logger, lvlDebug, "recursively deleting path '%1%'", path); bytesFreed = 0; @@ -509,7 +452,7 @@ void deletePath(const Path & path, uint64_t & bytesFreed) AutoDelete::AutoDelete() : del{false} {} -AutoDelete::AutoDelete(const std::string & p, bool recursive) : path(p) +AutoDelete::AutoDelete(const fs::path & p, bool recursive) : _path(p) { del = true; this->recursive = recursive; @@ -520,10 +463,9 @@ AutoDelete::~AutoDelete() try { if (del) { if (recursive) - deletePath(path); + deletePath(_path); else { - if (remove(path.c_str()) == -1) - throw SysError("cannot unlink '%1%'", path); + fs::remove(_path); } } } catch (...) { @@ -536,8 +478,8 @@ void AutoDelete::cancel() del = false; } -void AutoDelete::reset(const Path & p, bool recursive) { - path = p; +void AutoDelete::reset(const fs::path & p, bool recursive) { + _path = p; this->recursive = recursive; del = true; } @@ -611,13 +553,7 @@ std::pair createTempFile(const Path & prefix) void createSymlink(const Path & target, const Path & link) { -#ifndef _WIN32 - if (symlink(target.c_str(), link.c_str())) - throw SysError("creating symlink from '%1%' to '%2%'", link, target); -#else - // TODO modern Windows does in fact support symlinks - throw UnimplementedError("createSymlink"); -#endif + fs::create_symlink(target, link); } void replaceSymlink(const Path & target, const Path & link) @@ -627,12 +563,12 @@ void replaceSymlink(const Path & target, const Path & link) try { createSymlink(target, tmp); - } catch (SysError & e) { - if (e.errNo == EEXIST) continue; + } catch (fs::filesystem_error & e) { + if (e.code() == std::errc::file_exists) continue; throw; } - renameFile(tmp, link); + std::filesystem::rename(tmp, link); break; } @@ -655,29 +591,29 @@ static void setWriteTime(const fs::path & p, const struct stat & st) } #endif -void copy(const fs::directory_entry & from, const fs::path & to, bool andDelete) +void copyFile(const fs::path & from, const fs::path & to, bool andDelete) { #ifndef _WIN32 // TODO: Rewrite the `is_*` to use `symlink_status()` - auto statOfFrom = lstat(from.path().c_str()); + auto statOfFrom = lstat(from.c_str()); #endif - auto fromStatus = from.symlink_status(); + auto fromStatus = fs::symlink_status(from); // Mark the directory as writable so that we can delete its children if (andDelete && fs::is_directory(fromStatus)) { - fs::permissions(from.path(), fs::perms::owner_write, fs::perm_options::add | fs::perm_options::nofollow); + fs::permissions(from, fs::perms::owner_write, fs::perm_options::add | fs::perm_options::nofollow); } if (fs::is_symlink(fromStatus) || fs::is_regular_file(fromStatus)) { - fs::copy(from.path(), to, fs::copy_options::copy_symlinks | fs::copy_options::overwrite_existing); + fs::copy(from, to, fs::copy_options::copy_symlinks | 
fs::copy_options::overwrite_existing); } else if (fs::is_directory(fromStatus)) { fs::create_directory(to); - for (auto & entry : fs::directory_iterator(from.path())) { - copy(entry, to / entry.path().filename(), andDelete); + for (auto & entry : fs::directory_iterator(from)) { + copyFile(entry, to / entry.path().filename(), andDelete); } } else { - throw Error("file '%s' has an unsupported type", from.path()); + throw Error("file '%s' has an unsupported type", from); } #ifndef _WIN32 @@ -685,25 +621,15 @@ void copy(const fs::directory_entry & from, const fs::path & to, bool andDelete) #endif if (andDelete) { if (!fs::is_symlink(fromStatus)) - fs::permissions(from.path(), fs::perms::owner_write, fs::perm_options::add | fs::perm_options::nofollow); - fs::remove(from.path()); + fs::permissions(from, fs::perms::owner_write, fs::perm_options::add | fs::perm_options::nofollow); + fs::remove(from); } } -void copyFile(const Path & oldPath, const Path & newPath, bool andDelete) -{ - return copy(fs::directory_entry(fs::path(oldPath)), fs::path(newPath), andDelete); -} - -void renameFile(const Path & oldName, const Path & newName) -{ - fs::rename(oldName, newName); -} - void moveFile(const Path & oldName, const Path & newName) { try { - renameFile(oldName, newName); + std::filesystem::rename(oldName, newName); } catch (fs::filesystem_error & e) { auto oldPath = fs::path(oldName); auto newPath = fs::path(newName); @@ -717,8 +643,8 @@ void moveFile(const Path & oldName, const Path & newName) if (e.code().value() == EXDEV) { fs::remove(newPath); warn("Can’t rename %s as %s, copying instead", oldName, newName); - copy(fs::directory_entry(oldPath), tempCopyTarget, true); - renameFile( + copyFile(oldPath, tempCopyTarget, true); + std::filesystem::rename( os_string_to_string(PathViewNG { tempCopyTarget }), os_string_to_string(PathViewNG { newPath })); } diff --git a/src/libutil/file-system.hh b/src/libutil/file-system.hh index 0c4e7cfdd489..933e88441e74 100644 --- a/src/libutil/file-system.hh +++ b/src/libutil/file-system.hh @@ -9,6 +9,7 @@ #include "error.hh" #include "logging.hh" #include "file-descriptor.hh" +#include "file-path.hh" #include #include @@ -27,13 +28,6 @@ #include #include -#ifndef HAVE_STRUCT_DIRENT_D_TYPE -#define DT_UNKNOWN 0 -#define DT_REG 1 -#define DT_LNK 2 -#define DT_DIR 3 -#endif - /** * Polyfill for MinGW * @@ -123,29 +117,10 @@ bool pathAccessible(const Path & path); */ Path readLink(const Path & path); -bool isLink(const Path & path); - /** - * Read the contents of a directory. The entries `.` and `..` are - * removed. + * Open a `Descriptor` with read-only access to the given directory. */ -struct DirEntry -{ - std::string name; - ino_t ino; - /** - * one of DT_* - */ - unsigned char type; - DirEntry(std::string name, ino_t ino, unsigned char type) - : name(std::move(name)), ino(ino), type(type) { } -}; - -typedef std::vector DirEntries; - -DirEntries readDirectory(const Path & path); - -unsigned char getFileType(const Path & path); +Descriptor openDirectory(const std::filesystem::path & path); /** * Read the contents of a file into a string. @@ -170,9 +145,9 @@ void syncParent(const Path & path); * recursively. It's not an error if the path does not exist. The * second variant returns the number of bytes and blocks freed. 
*/ -void deletePath(const Path & path); +void deletePath(const std::filesystem::path & path); -void deletePath(const Path & path, uint64_t & bytesFreed); +void deletePath(const std::filesystem::path & path, uint64_t & bytesFreed); /** * Create a directory and all its parents, if necessary. Returns the @@ -194,8 +169,6 @@ void createSymlink(const Path & target, const Path & link); */ void replaceSymlink(const Path & target, const Path & link); -void renameFile(const Path & src, const Path & dst); - /** * Similar to 'renameFile', but fallback to a copy+remove if `src` and `dst` * are on a different filesystem. @@ -211,24 +184,30 @@ void moveFile(const Path & src, const Path & dst); * with the guaranty that the destination will be “fresh”, with no stale inode * or file descriptor pointing to it). */ -void copyFile(const Path & oldPath, const Path & newPath, bool andDelete); +void copyFile(const std::filesystem::path & from, const std::filesystem::path & to, bool andDelete); /** * Automatic cleanup of resources. */ class AutoDelete { - Path path; + std::filesystem::path _path; bool del; bool recursive; public: AutoDelete(); - AutoDelete(const Path & p, bool recursive = true); + AutoDelete(const std::filesystem::path & p, bool recursive = true); ~AutoDelete(); + void cancel(); - void reset(const Path & p, bool recursive = true); - operator Path() const { return path; } - operator PathView() const { return path; } + + void reset(const std::filesystem::path & p, bool recursive = true); + + const std::filesystem::path & path() const { return _path; } + PathViewNG view() const { return _path; } + + operator const std::filesystem::path & () const { return _path; } + operator PathViewNG () const { return _path; } }; diff --git a/src/libutil/git.cc b/src/libutil/git.cc index a60589baa17c..8c538c98820c 100644 --- a/src/libutil/git.cc +++ b/src/libutil/git.cc @@ -8,7 +8,6 @@ #include "signals.hh" #include "config.hh" #include "hash.hh" -#include "posix-source-accessor.hh" #include "git.hh" #include "serialise.hh" @@ -269,18 +268,18 @@ void dumpTree(const Tree & entries, Sink & sink, Mode dump( - SourceAccessor & accessor, const CanonPath & path, + const SourcePath & path, Sink & sink, std::function hook, PathFilter & filter, const ExperimentalFeatureSettings & xpSettings) { - auto st = accessor.lstat(path); + auto st = path.lstat(); switch (st.type) { case SourceAccessor::tRegular: { - accessor.readFile(path, sink, [&](uint64_t size) { + path.readFile(sink, [&](uint64_t size) { dumpBlobPrefix(size, sink, xpSettings); }); return st.isExecutable @@ -291,9 +290,9 @@ Mode dump( case SourceAccessor::tDirectory: { Tree entries; - for (auto & [name, _] : accessor.readDirectory(path)) { + for (auto & [name, _] : path.readDirectory()) { auto child = path / name; - if (!filter(child.abs())) continue; + if (!filter(child.path.abs())) continue; auto entry = hook(child); @@ -309,7 +308,7 @@ Mode dump( case SourceAccessor::tSymlink: { - auto target = accessor.readLink(path); + auto target = path.readLink(); dumpBlobPrefix(target.size(), sink, xpSettings); sink(target); return Mode::Symlink; @@ -323,13 +322,14 @@ Mode dump( TreeEntry dumpHash( - HashAlgorithm ha, - SourceAccessor & accessor, const CanonPath & path, PathFilter & filter) + HashAlgorithm ha, + const SourcePath & path, + PathFilter & filter) { std::function hook; - hook = [&](const CanonPath & path) -> TreeEntry { + hook = [&](const SourcePath & path) -> TreeEntry { auto hashSink = HashSink(ha); - auto mode = dump(accessor, path, hashSink, hook, filter); 
+ auto mode = dump(path, hashSink, hook, filter); auto hash = hashSink.finish().first; return { .mode = mode, diff --git a/src/libutil/git.hh b/src/libutil/git.hh index cfea48fbe2db..a65edb964fdf 100644 --- a/src/libutil/git.hh +++ b/src/libutil/git.hh @@ -8,7 +8,7 @@ #include "types.hh" #include "serialise.hh" #include "hash.hh" -#include "source-accessor.hh" +#include "source-path.hh" #include "fs-sink.hh" namespace nix::git { @@ -125,7 +125,7 @@ std::optional convertMode(SourceAccessor::Type type); * Given a `Hash`, return a `SourceAccessor` and `CanonPath` pointing to * the file system object with that path. */ -using RestoreHook = std::pair(Hash); +using RestoreHook = SourcePath(Hash); /** * Wrapper around `parse` and `RestoreSink` @@ -157,10 +157,10 @@ void dumpTree( * Note that if the child is a directory, its child in must also be so * processed in order to compute this information. */ -using DumpHook = TreeEntry(const CanonPath & path); +using DumpHook = TreeEntry(const SourcePath & path); Mode dump( - SourceAccessor & accessor, const CanonPath & path, + const SourcePath & path, Sink & sink, std::function hook, PathFilter & filter = defaultPathFilter, @@ -172,9 +172,9 @@ Mode dump( * A smaller wrapper around `dump`. */ TreeEntry dumpHash( - HashAlgorithm ha, - SourceAccessor & accessor, const CanonPath & path, - PathFilter & filter = defaultPathFilter); + HashAlgorithm ha, + const SourcePath & path, + PathFilter & filter = defaultPathFilter); /** * A line from the output of `git ls-remote --symref`. diff --git a/src/libutil/input-accessor.hh b/src/libutil/input-accessor.hh deleted file mode 100644 index 55b7c2f2f849..000000000000 --- a/src/libutil/input-accessor.hh +++ /dev/null @@ -1,27 +0,0 @@ -#pragma once -///@file - -#include "source-accessor.hh" -#include "ref.hh" -#include "repair-flag.hh" - -namespace nix { - -MakeError(RestrictedPathError, Error); - -struct InputAccessor : virtual SourceAccessor, std::enable_shared_from_this -{ - std::optional fingerprint; - - /** - * Return the maximum last-modified time of the files in this - * tree, if available. - */ - virtual std::optional getLastModified() - { - return std::nullopt; - } - -}; - -} diff --git a/src/libutil/linux/cgroup.cc b/src/libutil/linux/cgroup.cc index 8b8942643e35..ec4077478139 100644 --- a/src/libutil/linux/cgroup.cc +++ b/src/libutil/linux/cgroup.cc @@ -47,26 +47,26 @@ std::map getCgroups(const Path & cgroupFile) return cgroups; } -static CgroupStats destroyCgroup(const Path & cgroup, bool returnStats) +static CgroupStats destroyCgroup(const std::filesystem::path & cgroup, bool returnStats) { if (!pathExists(cgroup)) return {}; - auto procsFile = cgroup + "/cgroup.procs"; + auto procsFile = cgroup / "cgroup.procs"; if (!pathExists(procsFile)) throw Error("'%s' is not a cgroup", cgroup); /* Use the fast way to kill every process in a cgroup, if available. */ - auto killFile = cgroup + "/cgroup.kill"; + auto killFile = cgroup / "cgroup.kill"; if (pathExists(killFile)) writeFile(killFile, "1"); /* Otherwise, manually kill every process in the subcgroups and this cgroup. 
*/ - for (auto & entry : readDirectory(cgroup)) { - if (entry.type != DT_DIR) continue; - destroyCgroup(cgroup + "/" + entry.name, false); + for (auto & entry : std::filesystem::directory_iterator{cgroup}) { + if (entry.symlink_status().type() != std::filesystem::file_type::directory) continue; + destroyCgroup(cgroup / entry.path().filename(), false); } int round = 1; @@ -111,7 +111,7 @@ static CgroupStats destroyCgroup(const Path & cgroup, bool returnStats) CgroupStats stats; if (returnStats) { - auto cpustatPath = cgroup + "/cpu.stat"; + auto cpustatPath = cgroup / "cpu.stat"; if (pathExists(cpustatPath)) { for (auto & line : tokenizeString>(readFile(cpustatPath), "\n")) { diff --git a/src/libutil/memory-source-accessor.cc b/src/libutil/memory-source-accessor.cc index 880fa61b7f81..b7207cffb9bb 100644 --- a/src/libutil/memory-source-accessor.cc +++ b/src/libutil/memory-source-accessor.cc @@ -108,7 +108,7 @@ std::string MemorySourceAccessor::readLink(const CanonPath & path) throw Error("file '%s' is not a symbolic link", path); } -CanonPath MemorySourceAccessor::addFile(CanonPath path, std::string && contents) +SourcePath MemorySourceAccessor::addFile(CanonPath path, std::string && contents) { auto * f = open(path, File { File::Regular {} }); if (!f) @@ -118,7 +118,7 @@ CanonPath MemorySourceAccessor::addFile(CanonPath path, std::string && contents) else throw Error("file '%s' is not a regular file", path); - return path; + return SourcePath{ref(shared_from_this()), path}; } @@ -184,4 +184,10 @@ void MemorySink::createSymlink(const Path & path, const std::string & target) throw Error("file '%s' is not a symbolic link", path); } +ref makeEmptySourceAccessor() +{ + static auto empty = make_ref().cast(); + return empty; +} + } diff --git a/src/libutil/memory-source-accessor.hh b/src/libutil/memory-source-accessor.hh index 7a1990d2f720..c8f793922d68 100644 --- a/src/libutil/memory-source-accessor.hh +++ b/src/libutil/memory-source-accessor.hh @@ -1,4 +1,4 @@ -#include "source-accessor.hh" +#include "source-path.hh" #include "fs-sink.hh" #include "variant-wrapper.hh" @@ -69,7 +69,7 @@ struct MemorySourceAccessor : virtual SourceAccessor */ File * open(const CanonPath & path, std::optional create); - CanonPath addFile(CanonPath path, std::string && contents); + SourcePath addFile(CanonPath path, std::string && contents); }; /** diff --git a/src/libutil/posix-source-accessor.cc b/src/libutil/posix-source-accessor.cc index a589bfd3d120..225fc852caff 100644 --- a/src/libutil/posix-source-accessor.cc +++ b/src/libutil/posix-source-accessor.cc @@ -1,4 +1,5 @@ #include "posix-source-accessor.hh" +#include "source-path.hh" #include "signals.hh" #include "sync.hh" @@ -17,11 +18,11 @@ PosixSourceAccessor::PosixSourceAccessor() : PosixSourceAccessor(std::filesystem::path {}) { } -std::pair PosixSourceAccessor::createAtRoot(const std::filesystem::path & path) +SourcePath PosixSourceAccessor::createAtRoot(const std::filesystem::path & path) { std::filesystem::path path2 = absPath(path.string()); return { - PosixSourceAccessor { path2.root_path() }, + make_ref(path2.root_path()), CanonPath { path2.relative_path().string() }, }; } @@ -131,16 +132,33 @@ SourceAccessor::DirEntries PosixSourceAccessor::readDirectory(const CanonPath & { assertNoSymlinks(path); DirEntries res; - for (auto & entry : nix::readDirectory(makeAbsPath(path).string())) { - std::optional type; - switch (entry.type) { - case DT_REG: type = Type::tRegular; break; - #ifndef _WIN32 - case DT_LNK: type = Type::tSymlink; break; - #endif - case 
DT_DIR: type = Type::tDirectory; break; - } - res.emplace(entry.name, type); + for (auto & entry : std::filesystem::directory_iterator{makeAbsPath(path)}) { + auto type = [&]() -> std::optional { + std::filesystem::file_type nativeType; + try { + nativeType = entry.symlink_status().type(); + } catch (std::filesystem::filesystem_error & e) { + // We cannot always stat the child. (Ideally there is no + // stat because the native directory entry has the type + // already, but this isn't always the case.) + if (e.code() == std::errc::permission_denied || e.code() == std::errc::operation_not_permitted) + return std::nullopt; + else throw; + } + + // cannot exhaustively enumerate because implementation-specific + // additional file types are allowed. +#pragma GCC diagnostic push +#pragma GCC diagnostic ignored "-Wswitch-enum" + switch (nativeType) { + case std::filesystem::file_type::regular: return Type::tRegular; break; + case std::filesystem::file_type::symlink: return Type::tSymlink; break; + case std::filesystem::file_type::directory: return Type::tDirectory; break; + default: return tMisc; + } +#pragma GCC diagnostic pop + }(); + res.emplace(entry.path().filename().string(), type); } return res; } @@ -166,4 +184,14 @@ void PosixSourceAccessor::assertNoSymlinks(CanonPath path) } } +ref getFSSourceAccessor() +{ + static auto rootFS = make_ref(); + return rootFS; +} + +ref makeFSSourceAccessor(std::filesystem::path root) +{ + return make_ref(std::move(root)); +} } diff --git a/src/libutil/posix-source-accessor.hh b/src/libutil/posix-source-accessor.hh index 717c8f017794..40f60bb54b8f 100644 --- a/src/libutil/posix-source-accessor.hh +++ b/src/libutil/posix-source-accessor.hh @@ -4,6 +4,8 @@ namespace nix { +struct SourcePath; + /** * A source accessor that uses the Unix filesystem. */ @@ -53,7 +55,7 @@ struct PosixSourceAccessor : virtual SourceAccessor * and * [`std::filesystem::path::relative_path`](https://en.cppreference.com/w/cpp/filesystem/path/relative_path). */ - static std::pair createAtRoot(const std::filesystem::path & path); + static SourcePath createAtRoot(const std::filesystem::path & path); private: diff --git a/src/libutil/processes.hh b/src/libutil/processes.hh index a7e85b5beec4..e319f79e0119 100644 --- a/src/libutil/processes.hh +++ b/src/libutil/processes.hh @@ -80,14 +80,14 @@ pid_t startProcess(std::function fun, const ProcessOptions & options = P * Run a program and return its stdout in a string (i.e., like the * shell backtick operator). */ -std::string runProgram(Path program, bool searchPath = false, +std::string runProgram(Path program, bool lookupPath = false, const Strings & args = Strings(), const std::optional & input = {}, bool isInteractive = false); struct RunOptions { Path program; - bool searchPath = true; + bool lookupPath = true; Strings args; #ifndef _WIN32 std::optional uid; diff --git a/src/libutil/source-accessor.hh b/src/libutil/source-accessor.hh index 1f272327f812..d7fb0af5fac9 100644 --- a/src/libutil/source-accessor.hh +++ b/src/libutil/source-accessor.hh @@ -35,7 +35,7 @@ enum class SymlinkResolution { * filesystem-like entities (such as the real filesystem, tarballs or * Git repositories). */ -struct SourceAccessor +struct SourceAccessor : std::enable_shared_from_this { const size_t number; @@ -168,6 +168,43 @@ struct SourceAccessor CanonPath resolveSymlinks( const CanonPath & path, SymlinkResolution mode = SymlinkResolution::Full); + + /** + * A string that uniquely represents the contents of this + * accessor. 
This is used for caching lookups (see `fetchToStore()`). + */ + std::optional fingerprint; + + /** + * Return the maximum last-modified time of the files in this + * tree, if available. + */ + virtual std::optional getLastModified() + { return std::nullopt; } }; +/** + * Return a source accessor that contains only an empty root directory. + */ +ref makeEmptySourceAccessor(); + +/** + * Exception thrown when accessing a filtered path (see + * `FilteringSourceAccessor`). + */ +MakeError(RestrictedPathError, Error); + +/** + * Return an accessor for the root filesystem. + */ +ref getFSSourceAccessor(); + +/** + * Construct an accessor for the filesystem rooted at `root`. Note + * that it is not possible to escape `root` by appending `..` path + * elements, and that absolute symlinks are resolved relative to + * `root`. + */ +ref makeFSSourceAccessor(std::filesystem::path root); + } diff --git a/src/libutil/source-path.cc b/src/libutil/source-path.cc index 2a5b2085828f..023b5ed4b914 100644 --- a/src/libutil/source-path.cc +++ b/src/libutil/source-path.cc @@ -18,13 +18,13 @@ std::string SourcePath::readFile() const bool SourcePath::pathExists() const { return accessor->pathExists(path); } -InputAccessor::Stat SourcePath::lstat() const +SourceAccessor::Stat SourcePath::lstat() const { return accessor->lstat(path); } -std::optional SourcePath::maybeLstat() const +std::optional SourcePath::maybeLstat() const { return accessor->maybeLstat(path); } -InputAccessor::DirEntries SourcePath::readDirectory() const +SourceAccessor::DirEntries SourcePath::readDirectory() const { return accessor->readDirectory(path); } std::string SourcePath::readLink() const diff --git a/src/libutil/source-path.hh b/src/libutil/source-path.hh index b8f69af126a5..83ec6295de1d 100644 --- a/src/libutil/source-path.hh +++ b/src/libutil/source-path.hh @@ -7,7 +7,7 @@ #include "ref.hh" #include "canon-path.hh" -#include "input-accessor.hh" +#include "source-accessor.hh" namespace nix { @@ -19,10 +19,10 @@ namespace nix { */ struct SourcePath { - ref accessor; + ref accessor; CanonPath path; - SourcePath(ref accessor, CanonPath path = CanonPath::root) + SourcePath(ref accessor, CanonPath path = CanonPath::root) : accessor(std::move(accessor)) , path(std::move(path)) { } @@ -41,6 +41,11 @@ struct SourcePath */ std::string readFile() const; + void readFile( + Sink & sink, + std::function sizeCallback = [](uint64_t size){}) const + { return accessor->readFile(path, sink, sizeCallback); } + /** * Return whether this `SourcePath` denotes a file (of any type) * that exists @@ -51,19 +56,19 @@ struct SourcePath * Return stats about this `SourcePath`, or throw an exception if * it doesn't exist. */ - InputAccessor::Stat lstat() const; + SourceAccessor::Stat lstat() const; /** * Return stats about this `SourcePath`, or std::nullopt if it * doesn't exist. */ - std::optional maybeLstat() const; + std::optional maybeLstat() const; /** * If this `SourcePath` denotes a directory (not a symlink), * return its directory entries; otherwise throw an error. 
*/ - InputAccessor::DirEntries readDirectory() const; + SourceAccessor::DirEntries readDirectory() const; /** * If this `SourcePath` denotes a symlink, return its target; diff --git a/src/libutil/unix/unix-domain-socket.cc b/src/libutil/unix-domain-socket.cc similarity index 84% rename from src/libutil/unix/unix-domain-socket.cc rename to src/libutil/unix-domain-socket.cc index 0bcf9040d952..87914bb83e79 100644 --- a/src/libutil/unix/unix-domain-socket.cc +++ b/src/libutil/unix-domain-socket.cc @@ -1,24 +1,31 @@ #include "file-system.hh" -#include "processes.hh" #include "unix-domain-socket.hh" #include "util.hh" -#include -#include +#ifdef _WIN32 +# include +# include +#else +# include +# include +# include "processes.hh" +#endif #include namespace nix { AutoCloseFD createUnixDomainSocket() { - AutoCloseFD fdSocket = socket(PF_UNIX, SOCK_STREAM + AutoCloseFD fdSocket = toDescriptor(socket(PF_UNIX, SOCK_STREAM #ifdef SOCK_CLOEXEC | SOCK_CLOEXEC #endif - , 0); + , 0)); if (!fdSocket) throw SysError("cannot create Unix domain socket"); +#ifndef _WIN32 closeOnExec(fdSocket.get()); +#endif return fdSocket; } @@ -32,16 +39,15 @@ AutoCloseFD createUnixDomainSocket(const Path & path, mode_t mode) if (chmod(path.c_str(), mode) == -1) throw SysError("changing permissions on '%1%'", path); - if (listen(fdSocket.get(), 100) == -1) + if (listen(toSocket(fdSocket.get()), 100) == -1) throw SysError("cannot listen on socket '%1%'", path); return fdSocket; } - static void bindConnectProcHelper( std::string_view operationName, auto && operation, - int fd, const std::string & path) + Socket fd, const std::string & path) { struct sockaddr_un addr; addr.sun_family = AF_UNIX; @@ -54,6 +60,9 @@ static void bindConnectProcHelper( auto * psaddr = reinterpret_cast(&addr); if (path.size() + 1 >= sizeof(addr.sun_path)) { +#ifdef _WIN32 + throw Error("cannot %s to socket at '%s': path is too long", operationName, path); +#else Pipe pipe; pipe.create(); Pid pid = startProcess([&] { @@ -83,6 +92,7 @@ static void bindConnectProcHelper( errno = *errNo; throw SysError("cannot %s to socket at '%s'", operationName, path); } +#endif } else { memcpy(addr.sun_path, path.c_str(), path.size() + 1); if (operation(fd, psaddr, sizeof(addr)) == -1) @@ -91,7 +101,7 @@ static void bindConnectProcHelper( } -void bind(int fd, const std::string & path) +void bind(Socket fd, const std::string & path) { unlink(path.c_str()); @@ -99,7 +109,7 @@ void bind(int fd, const std::string & path) } -void connect(int fd, const std::string & path) +void connect(Socket fd, const std::string & path) { bindConnectProcHelper("connect", ::connect, fd, path); } diff --git a/src/libutil/unix-domain-socket.hh b/src/libutil/unix-domain-socket.hh new file mode 100644 index 000000000000..ba2baeb13340 --- /dev/null +++ b/src/libutil/unix-domain-socket.hh @@ -0,0 +1,83 @@ +#pragma once +///@file + +#include "types.hh" +#include "file-descriptor.hh" + +#ifdef _WIN32 +# include +#endif +#include + +namespace nix { + +/** + * Create a Unix domain socket. + */ +AutoCloseFD createUnixDomainSocket(); + +/** + * Create a Unix domain socket in listen mode. + */ +AutoCloseFD createUnixDomainSocket(const Path & path, mode_t mode); + +/** + * Often we want to use `Descriptor`, but Windows makes a slightly + * stronger file descriptor vs socket distinction, at least at the level + * of C types. 
+ */ +using Socket = +#ifdef _WIN32 + SOCKET +#else + int +#endif + ; + +#ifdef _WIN32 +/** + * Windows gives this a different name + */ +# define SHUT_WR SD_SEND +# define SHUT_RDWR SD_BOTH +#endif + +/** + * Convert a `Socket` to a `Descriptor` + * + * This is a no-op except on Windows. + */ +static inline Socket toSocket(Descriptor fd) +{ +#ifdef _WIN32 + return reinterpret_cast(fd); +#else + return fd; +#endif +} + +/** + * Convert a `Socket` to a `Descriptor` + * + * This is a no-op except on Windows. + */ +static inline Descriptor fromSocket(Socket fd) +{ +#ifdef _WIN32 + return reinterpret_cast(fd); +#else + return fd; +#endif +} + +/** + * Bind a Unix domain socket to a path. + */ +void bind(Socket fd, const std::string & path); + +/** + * Connect to a Unix domain socket. + */ +void connect(Socket fd, const std::string & path); + +} diff --git a/src/libutil/unix/file-descriptor.cc b/src/libutil/unix/file-descriptor.cc index 27c8d821b5dc..84a33af8181b 100644 --- a/src/libutil/unix/file-descriptor.cc +++ b/src/libutil/unix/file-descriptor.cc @@ -124,8 +124,8 @@ void closeMostFDs(const std::set & exceptions) { #if __linux__ try { - for (auto & s : readDirectory("/proc/self/fd")) { - auto fd = std::stoi(s.name); + for (auto & s : std::filesystem::directory_iterator{"/proc/self/fd"}) { + auto fd = std::stoi(s.path().filename()); if (!exceptions.count(fd)) { debug("closing leaked FD %d", fd); close(fd); @@ -133,6 +133,7 @@ void closeMostFDs(const std::set & exceptions) } return; } catch (SysError &) { + } catch (std::filesystem::filesystem_error &) { } #endif diff --git a/src/libutil/unix/file-path.cc b/src/libutil/unix/file-path.cc index 54a1cc278d43..294048a2f8f5 100644 --- a/src/libutil/unix/file-path.cc +++ b/src/libutil/unix/file-path.cc @@ -13,17 +13,17 @@ std::string os_string_to_string(PathViewNG::string_view path) return std::string { path }; } -PathNG::string_type string_to_os_string(std::string_view s) +std::filesystem::path::string_type string_to_os_string(std::string_view s) { return std::string { s }; } -std::optional maybePathNG(PathView path) +std::optional maybePath(PathView path) { return { path }; } -PathNG pathNG(PathView path) +std::filesystem::path pathNG(PathView path) { return path; } diff --git a/src/libutil/unix/file-system.cc b/src/libutil/unix/file-system.cc new file mode 100644 index 000000000000..bbbbfa5597c4 --- /dev/null +++ b/src/libutil/unix/file-system.cc @@ -0,0 +1,10 @@ +#include "file-system.hh" + +namespace nix { + +Descriptor openDirectory(const std::filesystem::path & path) +{ + return open(path.c_str(), O_RDONLY | O_DIRECTORY); +} + +} diff --git a/src/libutil/unix/processes.cc b/src/libutil/unix/processes.cc index f5d584330ab2..1af559a21b90 100644 --- a/src/libutil/unix/processes.cc +++ b/src/libutil/unix/processes.cc @@ -245,10 +245,10 @@ pid_t startProcess(std::function fun, const ProcessOptions & options) } -std::string runProgram(Path program, bool searchPath, const Strings & args, +std::string runProgram(Path program, bool lookupPath, const Strings & args, const std::optional & input, bool isInteractive) { - auto res = runProgram(RunOptions {.program = program, .searchPath = searchPath, .args = args, .input = input, .isInteractive = isInteractive}); + auto res = runProgram(RunOptions {.program = program, .lookupPath = lookupPath, .args = args, .input = input, .isInteractive = isInteractive}); if (!statusOk(res.first)) throw ExecError(res.first, "program '%1%' %2%", program, statusToString(res.first)); @@ -335,7 +335,7 @@ void 
runProgram2(const RunOptions & options) restoreProcessContext(); - if (options.searchPath) + if (options.lookupPath) execvp(options.program.c_str(), stringsToCharPtrs(args_).data()); // This allows you to refer to a program with a pathname relative // to the PATH variable. diff --git a/src/libutil/unix/unix-domain-socket.hh b/src/libutil/unix/unix-domain-socket.hh deleted file mode 100644 index b78feb454b18..000000000000 --- a/src/libutil/unix/unix-domain-socket.hh +++ /dev/null @@ -1,31 +0,0 @@ -#pragma once -///@file - -#include "types.hh" -#include "file-descriptor.hh" - -#include - -namespace nix { - -/** - * Create a Unix domain socket. - */ -AutoCloseFD createUnixDomainSocket(); - -/** - * Create a Unix domain socket in listen mode. - */ -AutoCloseFD createUnixDomainSocket(const Path & path, mode_t mode); - -/** - * Bind a Unix domain socket to a path. - */ -void bind(int fd, const std::string & path); - -/** - * Connect to a Unix domain socket. - */ -void connect(int fd, const std::string & path); - -} diff --git a/src/libutil/util.hh b/src/libutil/util.hh index 11a0431da891..8b049875a894 100644 --- a/src/libutil/util.hh +++ b/src/libutil/util.hh @@ -120,7 +120,7 @@ std::optional string2Int(const std::string_view s) template N string2IntWithUnitPrefix(std::string_view s) { - N multiplier = 1; + uint64_t multiplier = 1; if (!s.empty()) { char u = std::toupper(*s.rbegin()); if (std::isalpha(u)) { diff --git a/src/libutil/windows/file-path.cc b/src/libutil/windows/file-path.cc index d2f385f50d0c..3114ac4dfa7e 100644 --- a/src/libutil/windows/file-path.cc +++ b/src/libutil/windows/file-path.cc @@ -12,35 +12,35 @@ namespace nix { std::string os_string_to_string(PathViewNG::string_view path) { std::wstring_convert> converter; - return converter.to_bytes(PathNG::string_type { path }); + return converter.to_bytes(std::filesystem::path::string_type { path }); } -PathNG::string_type string_to_os_string(std::string_view s) +std::filesystem::path::string_type string_to_os_string(std::string_view s) { std::wstring_convert> converter; return converter.from_bytes(std::string { s }); } -std::optional maybePathNG(PathView path) +std::optional maybePath(PathView path) { if (path.length() >= 3 && (('A' <= path[0] && path[0] <= 'Z') || ('a' <= path[0] && path[0] <= 'z')) && path[1] == ':' && WindowsPathTrait::isPathSep(path[2])) { - PathNG::string_type sw = string_to_os_string( + std::filesystem::path::string_type sw = string_to_os_string( std::string { "\\\\?\\" } + path); std::replace(sw.begin(), sw.end(), '/', '\\'); return sw; } if (path.length() >= 7 && path[0] == '\\' && path[1] == '\\' && (path[2] == '.' || path[2] == '?') && path[3] == '\\' && ('A' <= path[4] && path[4] <= 'Z') && path[5] == ':' && WindowsPathTrait::isPathSep(path[6])) { - PathNG::string_type sw = string_to_os_string(path); + std::filesystem::path::string_type sw = string_to_os_string(path); std::replace(sw.begin(), sw.end(), '/', '\\'); return sw; } - return std::optional(); + return std::optional(); } -PathNG pathNG(PathView path) +std::filesystem::path pathNG(PathView path) { - std::optional sw = maybePathNG(path); + std::optional sw = maybePath(path); if (!sw) { // FIXME why are we not using the regular error handling? 
std::cerr << "invalid path for WinAPI call ["< & input, bool isInteractive) { throw UnimplementedError("Cannot shell out to git on Windows yet"); diff --git a/src/nix-build/nix-build.cc b/src/nix-build/nix-build.cc index 198e9cda013b..b601604dc8b5 100644 --- a/src/nix-build/nix-build.cc +++ b/src/nix-build/nix-build.cc @@ -25,6 +25,7 @@ #include "attr-path.hh" #include "legacy.hh" #include "users.hh" +#include "network-proxy.hh" using namespace nix; using namespace std::string_literals; @@ -121,8 +122,8 @@ static void main_nix_build(int argc, char * * argv) "HOME", "XDG_RUNTIME_DIR", "USER", "LOGNAME", "DISPLAY", "WAYLAND_DISPLAY", "WAYLAND_SOCKET", "PATH", "TERM", "IN_NIX_SHELL", "NIX_SHELL_PRESERVE_PROMPT", "TZ", "PAGER", "NIX_BUILD_SHELL", "SHLVL", - "http_proxy", "https_proxy", "ftp_proxy", "all_proxy", "no_proxy" }; + keepVars.insert(networkProxyVariables.begin(), networkProxyVariables.end()); Strings args; for (int i = 1; i < argc; ++i) @@ -170,7 +171,7 @@ static void main_nix_build(int argc, char * * argv) ; // obsolete else if (*arg == "--no-out-link" || *arg == "--no-link") - outLink = (Path) tmpDir + "/result"; + outLink = (tmpDir.path() / "result").string(); else if (*arg == "--attr" || *arg == "-A") attrPaths.push_back(getArg(*arg, arg, end)); @@ -258,7 +259,7 @@ static void main_nix_build(int argc, char * * argv) auto store = openStore(); auto evalStore = myArgs.evalStoreUrl ? openStore(*myArgs.evalStoreUrl) : store; - auto state = std::make_unique(myArgs.searchPath, evalStore, store); + auto state = std::make_unique(myArgs.lookupPath, evalStore, store); state->repair = myArgs.repair; if (myArgs.repair) buildMode = bmRepair; @@ -503,7 +504,7 @@ static void main_nix_build(int argc, char * * argv) if (passAsFile.count(var.first)) { keepTmp = true; auto fn = ".attr-" + std::to_string(fileNr++); - Path p = (Path) tmpDir + "/" + fn; + Path p = (tmpDir.path() / fn).string(); writeFile(p, var.second); env[var.first + "Path"] = p; } else @@ -535,10 +536,10 @@ static void main_nix_build(int argc, char * * argv) auto json = structAttrs.value(); structuredAttrsRC = writeStructuredAttrsShell(json); - auto attrsJSON = (Path) tmpDir + "/.attrs.json"; + auto attrsJSON = (tmpDir.path() / ".attrs.json").string(); writeFile(attrsJSON, json.dump()); - auto attrsSH = (Path) tmpDir + "/.attrs.sh"; + auto attrsSH = (tmpDir.path() / ".attrs.sh").string(); writeFile(attrsSH, structuredAttrsRC); env["NIX_ATTRS_SH_FILE"] = attrsSH; @@ -551,7 +552,7 @@ static void main_nix_build(int argc, char * * argv) convenience, source $stdenv/setup to setup additional environment variables and shell functions. Also don't lose the current $PATH directories. */ - auto rcfile = (Path) tmpDir + "/rc"; + auto rcfile = (tmpDir.path() / "rc").string(); std::string rc = fmt( R"(_nix_shell_clean_tmpdir() { command rm -rf %1%; }; )"s + (keepTmp ? @@ -582,7 +583,7 @@ static void main_nix_build(int argc, char * * argv) "unset TZ; %6%" "shopt -s execfail;" "%7%", - shellEscape(tmpDir), + shellEscape(tmpDir.path().string()), (pure ? "" : "p=$PATH; "), (pure ? 
"" : "PATH=$PATH:$p; unset p; "), shellEscape(dirOf(*shell)), diff --git a/src/nix-collect-garbage/nix-collect-garbage.cc b/src/nix-collect-garbage/nix-collect-garbage.cc index bb3f1bc6add8..91209c978980 100644 --- a/src/nix-collect-garbage/nix-collect-garbage.cc +++ b/src/nix-collect-garbage/nix-collect-garbage.cc @@ -27,18 +27,18 @@ void removeOldGenerations(std::string dir) bool canWrite = access(dir.c_str(), W_OK) == 0; - for (auto & i : readDirectory(dir)) { + for (auto & i : std::filesystem::directory_iterator{dir}) { checkInterrupt(); - auto path = dir + "/" + i.name; - auto type = i.type == DT_UNKNOWN ? getFileType(path) : i.type; + auto path = i.path().string(); + auto type = i.symlink_status().type(); - if (type == DT_LNK && canWrite) { + if (type == std::filesystem::file_type::symlink && canWrite) { std::string link; try { link = readLink(path); - } catch (SysError & e) { - if (e.errNo == ENOENT) continue; + } catch (std::filesystem::filesystem_error & e) { + if (e.code() == std::errc::no_such_file_or_directory) continue; throw; } if (link.find("link") != std::string::npos) { @@ -49,7 +49,7 @@ void removeOldGenerations(std::string dir) } else deleteOldGenerations(path, dryRun); } - } else if (type == DT_DIR) { + } else if (type == std::filesystem::file_type::directory) { removeOldGenerations(path); } } diff --git a/src/nix-env/nix-env.cc b/src/nix-env/nix-env.cc index eeca01833b2d..b5e13cc2308b 100644 --- a/src/nix-env/nix-env.cc +++ b/src/nix-env/nix-env.cc @@ -94,11 +94,11 @@ static bool parseInstallSourceOptions(Globals & globals, } -static bool isNixExpr(const SourcePath & path, struct InputAccessor::Stat & st) +static bool isNixExpr(const SourcePath & path, struct SourceAccessor::Stat & st) { return - st.type == InputAccessor::tRegular - || (st.type == InputAccessor::tDirectory && (path / "default.nix").resolveSymlinks().pathExists()); + st.type == SourceAccessor::tRegular + || (st.type == SourceAccessor::tDirectory && (path / "default.nix").resolveSymlinks().pathExists()); } @@ -119,14 +119,14 @@ static void getAllExprs(EvalState & state, auto path2 = (path / i).resolveSymlinks(); - InputAccessor::Stat st; + SourceAccessor::Stat st; try { st = path2.lstat(); } catch (Error &) { continue; // ignore dangling symlinks in ~/.nix-defexpr } - if (isNixExpr(path2, st) && (st.type != InputAccessor::tRegular || hasSuffix(path2.baseName(), ".nix"))) { + if (isNixExpr(path2, st) && (st.type != SourceAccessor::tRegular || hasSuffix(path2.baseName(), ".nix"))) { /* Strip off the `.nix' filename suffix (if applicable), otherwise the attribute cannot be selected with the `-A' option. Useful if you want to stick a Nix @@ -149,7 +149,7 @@ static void getAllExprs(EvalState & state, throw Error("too many Nix expressions in directory '%1%'", path); attrs.alloc(attrName).mkApp(&state.getBuiltin("import"), vArg); } - else if (st.type == InputAccessor::tDirectory) + else if (st.type == SourceAccessor::tDirectory) /* `path2' is a directory (with no default.nix in it); recurse into it. */ getAllExprs(state, path2, seen, attrs); @@ -171,7 +171,7 @@ static void loadSourceExpr(EvalState & state, const SourcePath & path, Value & v set flat, not nested, to make it easier for a user to have a ~/.nix-defexpr directory that includes some system-wide directory). 
*/ - else if (st.type == InputAccessor::tDirectory) { + else if (st.type == SourceAccessor::tDirectory) { auto attrs = state.buildBindings(maxAttrs); attrs.insert(state.symbols.create("_combineChannels"), &state.vEmptyList); StringSet seen; @@ -1525,7 +1525,7 @@ static int main_nix_env(int argc, char * * argv) auto store = openStore(); - globals.state = std::shared_ptr(new EvalState(myArgs.searchPath, store)); + globals.state = std::shared_ptr(new EvalState(myArgs.lookupPath, store)); globals.state->repair = myArgs.repair; globals.instSource.nixExprPath = std::make_shared( diff --git a/src/nix-instantiate/nix-instantiate.cc b/src/nix-instantiate/nix-instantiate.cc index 1e17282254ff..35664374ceaf 100644 --- a/src/nix-instantiate/nix-instantiate.cc +++ b/src/nix-instantiate/nix-instantiate.cc @@ -157,7 +157,7 @@ static int main_nix_instantiate(int argc, char * * argv) auto store = openStore(); auto evalStore = myArgs.evalStoreUrl ? openStore(*myArgs.evalStoreUrl) : store; - auto state = std::make_unique(myArgs.searchPath, evalStore, store); + auto state = std::make_unique(myArgs.lookupPath, evalStore, store); state->repair = myArgs.repair; Bindings & autoArgs = *myArgs.getAutoArgs(*state); diff --git a/src/nix-store/nix-store.cc b/src/nix-store/nix-store.cc index 719675cba2ef..b23d99ad6a4c 100644 --- a/src/nix-store/nix-store.cc +++ b/src/nix-store/nix-store.cc @@ -184,7 +184,7 @@ static void opAdd(Strings opFlags, Strings opArgs) for (auto & i : opArgs) { auto [accessor, canonPath] = PosixSourceAccessor::createAtRoot(i); cout << fmt("%s\n", store->printStorePath(store->addToStore( - std::string(baseNameOf(i)), accessor, canonPath))); + std::string(baseNameOf(i)), {accessor, canonPath}))); } } @@ -209,8 +209,7 @@ static void opAddFixed(Strings opFlags, Strings opArgs) auto [accessor, canonPath] = PosixSourceAccessor::createAtRoot(i); std::cout << fmt("%s\n", store->printStorePath(store->addToStoreSlow( baseNameOf(i), - accessor, - canonPath, + {accessor, canonPath}, method, hashAlgo).path)); } @@ -562,8 +561,7 @@ static void registerValidity(bool reregister, bool hashGiven, bool canonicalise) #endif if (!hashGiven) { HashResult hash = hashPath( - *store->getFSAccessor(false), CanonPath { store->printStorePath(info->path) }, - + {store->getFSAccessor(false), CanonPath { store->printStorePath(info->path) }}, FileSerialisationMethod::Recursive, HashAlgorithm::SHA256); info->narHash = hash.first; info->narSize = hash.second; diff --git a/src/nix/add-to-store.cc b/src/nix/add-to-store.cc index 02154715f81b..af6743375256 100644 --- a/src/nix/add-to-store.cc +++ b/src/nix/add-to-store.cc @@ -41,9 +41,9 @@ struct CmdAddToStore : MixDryRun, StoreCommand auto storePath = dryRun ? 
store->computeStorePath( - *namePart, accessor, path2, caMethod, hashAlgo, {}).first + *namePart, {accessor, path2}, caMethod, hashAlgo, {}).first : store->addToStoreSlow( - *namePart, accessor, path2, caMethod, hashAlgo, {}).path; + *namePart, {accessor, path2}, caMethod, hashAlgo, {}).path; logger->cout("%s", store->printStorePath(storePath)); } diff --git a/src/nix/config-check.cc b/src/nix/config-check.cc index f7c4cebecc01..9575bf33887f 100644 --- a/src/nix/config-check.cc +++ b/src/nix/config-check.cc @@ -101,13 +101,14 @@ struct CmdConfigCheck : StoreCommand Path userEnv = canonPath(profileDir, true); if (store->isStorePath(userEnv) && hasSuffix(userEnv, "user-environment")) { - while (profileDir.find("/profiles/") == std::string::npos && isLink(profileDir)) + while (profileDir.find("/profiles/") == std::string::npos && std::filesystem::is_symlink(profileDir)) profileDir = absPath(readLink(profileDir), dirOf(profileDir)); if (profileDir.find("/profiles/") == std::string::npos) dirs.insert(dir); } - } catch (SystemError &) {} + } catch (SystemError &) { + } catch (std::filesystem::filesystem_error &) {} } if (!dirs.empty()) { diff --git a/src/nix/develop.cc b/src/nix/develop.cc index 35d3da9120cb..08d44d7aa804 100644 --- a/src/nix/develop.cc +++ b/src/nix/develop.cc @@ -336,8 +336,8 @@ struct Common : InstallableCommand, MixProfile std::string makeRcScript( ref store, const BuildEnvironment & buildEnvironment, - const Path & tmpDir, - const Path & outputsDir = absPath(".") + "/outputs") + const std::filesystem::path & tmpDir, + const std::filesystem::path & outputsDir = std::filesystem::path { absPath(".") } / "outputs") { // A list of colon-separated environment variables that should be // prepended to, rather than overwritten, in order to keep the shell usable. @@ -376,13 +376,19 @@ struct Common : InstallableCommand, MixProfile StringMap rewrites; if (buildEnvironment.providesStructuredAttrs()) { for (auto & [outputName, from] : BuildEnvironment::getAssociative(outputs->second)) { - rewrites.insert({from, outputsDir + "/" + outputName}); + rewrites.insert({ + from, + (outputsDir / outputName).string() + }); } } else { for (auto & outputName : BuildEnvironment::getStrings(outputs->second)) { auto from = buildEnvironment.vars.find(outputName); assert(from != buildEnvironment.vars.end()); - rewrites.insert({BuildEnvironment::getString(from->second), outputsDir + "/" + outputName}); + rewrites.insert({ + BuildEnvironment::getString(from->second), + (outputsDir / outputName).string(), + }); } } @@ -405,7 +411,7 @@ struct Common : InstallableCommand, MixProfile if (buildEnvironment.providesStructuredAttrs()) { fixupStructuredAttrs( - "sh", + PATHNG_LITERAL("sh"), "NIX_ATTRS_SH_FILE", buildEnvironment.getAttrsSH(), rewrites, @@ -413,7 +419,7 @@ struct Common : InstallableCommand, MixProfile tmpDir ); fixupStructuredAttrs( - "json", + PATHNG_LITERAL("json"), "NIX_ATTRS_JSON_FILE", buildEnvironment.getAttrsJSON(), rewrites, @@ -430,19 +436,21 @@ struct Common : InstallableCommand, MixProfile * that's accessible from the interactive shell session. */ void fixupStructuredAttrs( - const std::string & ext, + PathViewNG::string_view ext, const std::string & envVar, const std::string & content, StringMap & rewrites, const BuildEnvironment & buildEnvironment, - const Path & tmpDir) + const std::filesystem::path & tmpDir) { - auto targetFilePath = tmpDir + "/.attrs." 
+ ext; - writeFile(targetFilePath, content); + auto targetFilePath = tmpDir / PATHNG_LITERAL(".attrs."); + targetFilePath += ext; + + writeFile(targetFilePath.string(), content); auto fileInBuilderEnv = buildEnvironment.vars.find(envVar); assert(fileInBuilderEnv != buildEnvironment.vars.end()); - rewrites.insert({BuildEnvironment::getString(fileInBuilderEnv->second), targetFilePath}); + rewrites.insert({BuildEnvironment::getString(fileInBuilderEnv->second), targetFilePath.string()}); } Strings getDefaultFlakeAttrPaths() override @@ -578,7 +586,7 @@ struct CmdDevelop : Common, MixEnvironment AutoDelete tmpDir(createTempDir("", "nix-develop"), true); - auto script = makeRcScript(store, buildEnvironment, (Path) tmpDir); + auto script = makeRcScript(store, buildEnvironment, tmpDir); if (verbosity >= lvlDebug) script += "set -x\n"; @@ -687,7 +695,7 @@ struct CmdDevelop : Common, MixEnvironment } } - runProgramInStore(store, UseSearchPath::Use, shell, args, buildEnvironment.getSystem()); + runProgramInStore(store, UseLookupPath::Use, shell, args, buildEnvironment.getSystem()); #endif } }; diff --git a/src/nix/flake.cc b/src/nix/flake.cc index bd6327ac0bcd..6bf694e08afe 100644 --- a/src/nix/flake.cc +++ b/src/nix/flake.cc @@ -454,11 +454,6 @@ struct CmdFlakeCheck : FlakeCommand if (v.payload.lambda.fun->hasFormals() || !argHasName(v.payload.lambda.fun->arg, "final")) throw Error("overlay does not take an argument named 'final'"); - auto body = dynamic_cast(v.payload.lambda.fun->body); - if (!body - || body->hasFormals() - || !argHasName(body->arg, "prev")) - throw Error("overlay does not take an argument named 'prev'"); // FIXME: if we have a 'nixpkgs' input, use it to // evaluate the overlay. } catch (Error & e) { @@ -871,9 +866,9 @@ struct CmdFlakeInitCommon : virtual Args, EvalCommand { createDirs(to); - for (auto & entry : readDirectory(from)) { - auto from2 = from + "/" + entry.name; - auto to2 = to + "/" + entry.name; + for (auto & entry : std::filesystem::directory_iterator{from}) { + auto from2 = entry.path().string(); + auto to2 = to + "/" + entry.path().filename().string(); auto st = lstat(from2); if (S_ISDIR(st.st_mode)) copyDir(from2, to2); diff --git a/src/nix/flake.md b/src/nix/flake.md index d8b5bf435f1f..661dd2f73335 100644 --- a/src/nix/flake.md +++ b/src/nix/flake.md @@ -439,7 +439,7 @@ The following attributes are supported in `flake.nix`: - [`bash-prompt-prefix`](@docroot@/command-ref/conf-file.md#conf-bash-prompt-prefix) - [`bash-prompt-suffix`](@docroot@/command-ref/conf-file.md#conf-bash-prompt-suffix) - [`flake-registry`](@docroot@/command-ref/conf-file.md#conf-flake-registry) - - [`commit-lockfile-summary`](@docroot@/command-ref/conf-file.md#conf-commit-lockfile-summary) + - [`commit-lock-file-summary`](@docroot@/command-ref/conf-file.md#conf-commit-lock-file-summary) ## Flake inputs diff --git a/src/nix/fmt.cc b/src/nix/fmt.cc index 059904150f30..4b0fbb89dfb3 100644 --- a/src/nix/fmt.cc +++ b/src/nix/fmt.cc @@ -49,7 +49,7 @@ struct CmdFmt : SourceExprCommand { } } - runProgramInStore(store, UseSearchPath::DontUse, app.program, programArgs); + runProgramInStore(store, UseLookupPath::DontUse, app.program, programArgs); }; }; diff --git a/src/nix/hash.cc b/src/nix/hash.cc index f849bf0cfae6..f969886eaf0f 100644 --- a/src/nix/hash.cc +++ b/src/nix/hash.cc @@ -87,30 +87,29 @@ struct CmdHashBase : Command return std::make_unique(hashAlgo); }; - auto [accessor_, canonPath] = PosixSourceAccessor::createAtRoot(path); - auto & accessor = accessor_; + auto path2 = 
PosixSourceAccessor::createAtRoot(path); Hash h { HashAlgorithm::SHA256 }; // throwaway def to appease C++ switch (mode) { case FileIngestionMethod::Flat: case FileIngestionMethod::Recursive: { auto hashSink = makeSink(); - dumpPath(accessor, canonPath, *hashSink, (FileSerialisationMethod) mode); + dumpPath(path2, *hashSink, (FileSerialisationMethod) mode); h = hashSink->finish().first; break; } case FileIngestionMethod::Git: { std::function hook; - hook = [&](const CanonPath & path) -> git::TreeEntry { + hook = [&](const SourcePath & path) -> git::TreeEntry { auto hashSink = makeSink(); - auto mode = dump(accessor, path, *hashSink, hook); + auto mode = dump(path, *hashSink, hook); auto hash = hashSink->finish().first; return { .mode = mode, .hash = hash, }; }; - h = hook(canonPath).hash; + h = hook(path2).hash; break; } } diff --git a/src/nix/main.cc b/src/nix/main.cc index cafb1e1c6e8c..f20042948cad 100644 --- a/src/nix/main.cc +++ b/src/nix/main.cc @@ -14,9 +14,10 @@ #include "finally.hh" #include "loggers.hh" #include "markdown.hh" -#include "memory-input-accessor.hh" +#include "memory-source-accessor.hh" #include "terminal.hh" #include "users.hh" +#include "network-proxy.hh" #include "eval-cache.hh" #include @@ -42,27 +43,6 @@ void chrootHelper(int argc, char * * argv); namespace nix { -#ifdef _WIN32 -[[maybe_unused]] -#endif -static bool haveProxyEnvironmentVariables() -{ - static const std::vector proxyVariables = { - "http_proxy", - "https_proxy", - "ftp_proxy", - "HTTP_PROXY", - "HTTPS_PROXY", - "FTP_PROXY" - }; - for (auto & proxyVariable: proxyVariables) { - if (getEnv(proxyVariable).has_value()) { - return true; - } - } - return false; -} - /* Check if we have a non-loopback/link-local network interface. */ static bool haveInternet() { @@ -87,7 +67,7 @@ static bool haveInternet() } } - if (haveProxyEnvironmentVariables()) return true; + if (haveNetworkProxyConnection()) return true; return false; #else diff --git a/src/nix/prefetch.cc b/src/nix/prefetch.cc index 8e6a2e805cdb..3ce52acc5631 100644 --- a/src/nix/prefetch.cc +++ b/src/nix/prefetch.cc @@ -87,7 +87,7 @@ std::tuple prefetchFile( if (!storePath) { AutoDelete tmpDir(createTempDir(), true); - Path tmpFile = (Path) tmpDir + "/tmp"; + std::filesystem::path tmpFile = tmpDir.path() / "tmp"; /* Download the file. */ { @@ -95,7 +95,7 @@ std::tuple prefetchFile( if (executable) mode = 0700; - AutoCloseFD fd = toDescriptor(open(tmpFile.c_str(), O_WRONLY | O_CREAT | O_EXCL, mode)); + AutoCloseFD fd = toDescriptor(open(tmpFile.string().c_str(), O_WRONLY | O_CREAT | O_EXCL, mode)); if (!fd) throw SysError("creating temporary file '%s'", tmpFile); FdSink sink(fd.get()); @@ -109,15 +109,16 @@ std::tuple prefetchFile( if (unpack) { Activity act(*logger, lvlChatty, actUnknown, fmt("unpacking '%s'", url)); - Path unpacked = (Path) tmpDir + "/unpacked"; + auto unpacked = (tmpDir.path() / "unpacked").string(); createDirs(unpacked); - unpackTarfile(tmpFile, unpacked); + unpackTarfile(tmpFile.string(), unpacked); /* If the archive unpacks to a single file/directory, then use that as the top-level. 
*/ - auto entries = readDirectory(unpacked); - if (entries.size() == 1) - tmpFile = unpacked + "/" + entries[0].name; + auto entries = std::filesystem::directory_iterator{unpacked}; + auto file_count = std::distance(entries, std::filesystem::directory_iterator{}); + if (file_count == 1) + tmpFile = entries->path(); else tmpFile = unpacked; } @@ -125,9 +126,8 @@ std::tuple prefetchFile( Activity act(*logger, lvlChatty, actUnknown, fmt("adding '%s' to the store", url)); - auto [accessor, canonPath] = PosixSourceAccessor::createAtRoot(tmpFile); auto info = store->addToStoreSlow( - *name, accessor, canonPath, + *name, PosixSourceAccessor::createAtRoot(tmpFile), ingestionMethod, hashAlgo, {}, expectedHash); storePath = info.path; assert(info.ca); @@ -193,7 +193,7 @@ static int main_nix_prefetch_url(int argc, char * * argv) startProgressBar(); auto store = openStore(); - auto state = std::make_unique(myArgs.searchPath, store); + auto state = std::make_unique(myArgs.lookupPath, store); Bindings & autoArgs = *myArgs.getAutoArgs(*state); diff --git a/src/nix/repl.cc b/src/nix/repl.cc index 8bbfe0f07659..a2f3e033e72f 100644 --- a/src/nix/repl.cc +++ b/src/nix/repl.cc @@ -78,7 +78,7 @@ struct CmdRepl : RawInstallablesCommand return values; }; auto repl = AbstractNixRepl::create( - searchPath, + lookupPath, openStore(), state, getValues diff --git a/src/nix/run.cc b/src/nix/run.cc index c456833029e0..cc999ddf4f47 100644 --- a/src/nix/run.cc +++ b/src/nix/run.cc @@ -25,7 +25,7 @@ std::string chrootHelperName = "__run_in_chroot"; namespace nix { void runProgramInStore(ref store, - UseSearchPath useSearchPath, + UseLookupPath useLookupPath, const std::string & program, const Strings & args, std::optional system) @@ -61,7 +61,7 @@ void runProgramInStore(ref store, linux::setPersonality(*system); #endif - if (useSearchPath == UseSearchPath::Use) + if (useLookupPath == UseLookupPath::Use) execvp(program.c_str(), stringsToCharPtrs(args).data()); else execv(program.c_str(), stringsToCharPtrs(args).data()); @@ -142,7 +142,7 @@ struct CmdShell : InstallablesCommand, MixEnvironment Strings args; for (auto & arg : command) args.push_back(arg); - runProgramInStore(store, UseSearchPath::Use, *command.begin(), args); + runProgramInStore(store, UseLookupPath::Use, *command.begin(), args); } }; @@ -204,7 +204,7 @@ struct CmdRun : InstallableValueCommand Strings allArgs{app.program}; for (auto & i : args) allArgs.push_back(i); - runProgramInStore(store, UseSearchPath::DontUse, app.program, allArgs); + runProgramInStore(store, UseLookupPath::DontUse, app.program, allArgs); } }; @@ -248,9 +248,9 @@ void chrootHelper(int argc, char * * argv) if (mount(realStoreDir.c_str(), (tmpDir + storeDir).c_str(), "", MS_BIND, 0) == -1) throw SysError("mounting '%s' on '%s'", realStoreDir, storeDir); - for (auto entry : readDirectory("/")) { - auto src = "/" + entry.name; - Path dst = tmpDir + "/" + entry.name; + for (auto entry : std::filesystem::directory_iterator{"/"}) { + auto src = entry.path().string(); + Path dst = tmpDir + "/" + entry.path().filename().string(); if (pathExists(dst)) continue; auto st = lstat(src); if (S_ISDIR(st.st_mode)) { diff --git a/src/nix/run.hh b/src/nix/run.hh index a55917b06d73..2fe6ed86ae6b 100644 --- a/src/nix/run.hh +++ b/src/nix/run.hh @@ -5,13 +5,13 @@ namespace nix { -enum struct UseSearchPath { +enum struct UseLookupPath { Use, DontUse }; void runProgramInStore(ref store, - UseSearchPath useSearchPath, + UseLookupPath useLookupPath, const std::string & program, const Strings & args, 
std::optional system = std::nullopt); diff --git a/src/nix/upgrade-nix.cc b/src/nix/upgrade-nix.cc index 4c7a74e16f19..17d1edb97e33 100644 --- a/src/nix/upgrade-nix.cc +++ b/src/nix/upgrade-nix.cc @@ -121,7 +121,7 @@ struct CmdUpgradeNix : MixDryRun, StoreCommand Path profileDir = dirOf(where); // Resolve profile to /nix/var/nix/profiles/ link. - while (canonPath(profileDir).find("/profiles/") == std::string::npos && isLink(profileDir)) + while (canonPath(profileDir).find("/profiles/") == std::string::npos && std::filesystem::is_symlink(profileDir)) profileDir = readLink(profileDir); printInfo("found profile '%s'", profileDir); @@ -147,7 +147,7 @@ struct CmdUpgradeNix : MixDryRun, StoreCommand auto req = FileTransferRequest((std::string&) settings.upgradeNixStorePathUrl); auto res = getFileTransfer()->download(req); - auto state = std::make_unique(SearchPath{}, store); + auto state = std::make_unique(LookupPath{}, store); auto v = state->allocValue(); state->eval(state->parseExprFromString(res.data, state->rootPath(CanonPath("/no-such-path"))), *v); Bindings & bindings(*state->allocBindings(0)); diff --git a/tests/functional/common.sh b/tests/functional/common.sh index 7b0922c9f320..4ec17b70664f 100644 --- a/tests/functional/common.sh +++ b/tests/functional/common.sh @@ -4,7 +4,11 @@ if [[ -z "${COMMON_SH_SOURCED-}" ]]; then COMMON_SH_SOURCED=1 -source "$(readlink -f "$(dirname "${BASH_SOURCE[0]-$0}")")/common/vars-and-functions.sh" +dir="$(readlink -f "$(dirname "${BASH_SOURCE[0]-$0}")")" + +source "$dir"/common/vars-and-functions.sh +source "$dir"/common/init.sh + if [[ -n "${NIX_DAEMON_PACKAGE:-}" ]]; then startDaemon fi diff --git a/tests/functional/init.sh b/tests/functional/common/init.sh similarity index 66% rename from tests/functional/init.sh rename to tests/functional/common/init.sh index 97b1b058753e..74da126517ad 100755 --- a/tests/functional/init.sh +++ b/tests/functional/common/init.sh @@ -1,14 +1,13 @@ -# Don't start the daemon -source common/vars-and-functions.sh - test -n "$TEST_ROOT" -if test -d "$TEST_ROOT"; then - chmod -R u+rw "$TEST_ROOT" - # We would delete any daemon socket, so let's stop the daemon first. - killDaemon +# We would delete any daemon socket, so let's stop the daemon first. +killDaemon +# Destroy the test directory that may have persisted from previous runs +if [[ -e "$TEST_ROOT" ]]; then + chmod -R u+w "$TEST_ROOT" rm -rf "$TEST_ROOT" fi -mkdir "$TEST_ROOT" +mkdir -p "$TEST_ROOT" +mkdir "$TEST_HOME" mkdir "$NIX_STORE_DIR" mkdir "$NIX_LOCALSTATE_DIR" @@ -36,7 +35,7 @@ extra-experimental-features = flakes EOF # Initialise the database. +# The flag itself does nothing, but running the command touches the store nix-store --init - -# Did anything happen? 
+# Sanity check test -e "$NIX_STATE_DIR"/db/db.sqlite diff --git a/tests/functional/common/vars-and-functions.sh.in b/tests/functional/common/vars-and-functions.sh.in index e7e2fc770026..cb1f0d566168 100644 --- a/tests/functional/common/vars-and-functions.sh.in +++ b/tests/functional/common/vars-and-functions.sh.in @@ -1,3 +1,5 @@ +# NOTE: instances of @variable@ are substituted as defined in /mk/templates.mk + set -eu -o pipefail if [[ -z "${COMMON_VARS_AND_FUNCTIONS_SH_SOURCED-}" ]]; then @@ -34,7 +36,6 @@ unset XDG_DATA_HOME unset XDG_CONFIG_HOME unset XDG_CONFIG_DIRS unset XDG_CACHE_HOME -mkdir -p $TEST_HOME export PATH=@bindir@:$PATH if [[ -n "${NIX_CLIENT_PACKAGE:-}" ]]; then diff --git a/tests/functional/config.sh b/tests/functional/config.sh index efdf2a95850e..efdafa8ca7de 100644 --- a/tests/functional/config.sh +++ b/tests/functional/config.sh @@ -66,4 +66,9 @@ exp_features=$(nix config show | grep '^experimental-features' | cut -d '=' -f 2 # Test that it's possible to retrieve a single setting's value val=$(nix config show | grep '^warn-dirty' | cut -d '=' -f 2 | xargs) val2=$(nix config show warn-dirty) -[[ $val == $val2 ]] \ No newline at end of file +[[ $val == $val2 ]] + +# Test unit prefixes. +[[ $(nix config show --min-free 64K min-free) = 65536 ]] +[[ $(nix config show --min-free 1M min-free) = 1048576 ]] +[[ $(nix config show --min-free 2G min-free) = 2147483648 ]] diff --git a/tests/functional/extra-sandbox-profile.nix b/tests/functional/extra-sandbox-profile.nix new file mode 100644 index 000000000000..aa680b918a65 --- /dev/null +++ b/tests/functional/extra-sandbox-profile.nix @@ -0,0 +1,19 @@ +{ destFile, seed }: + +with import ./config.nix; + +mkDerivation { + name = "simple"; + __sandboxProfile = '' + # Allow writing any file in the filesystem + (allow file*) + ''; + inherit seed; + buildCommand = '' + ( + set -x + touch ${destFile} + touch $out + ) + ''; +} diff --git a/tests/functional/extra-sandbox-profile.sh b/tests/functional/extra-sandbox-profile.sh new file mode 100644 index 000000000000..ac3ca036f3c6 --- /dev/null +++ b/tests/functional/extra-sandbox-profile.sh @@ -0,0 +1,23 @@ +source common.sh + +if [[ $(uname) != Darwin ]]; then skipTest "Need Darwin"; fi + +DEST_FILE="${TEST_ROOT}/foo" + +testSandboxProfile () ( + set -e + + sandboxMode="$1" + + rm -f "${DEST_FILE}" + nix-build --no-out-link ./extra-sandbox-profile.nix \ + --option sandbox "$sandboxMode" \ + --argstr seed "$RANDOM" \ + --argstr destFile "${DEST_FILE}" + + ls -l "${DEST_FILE}" +) + +testSandboxProfile "false" +expectStderr 2 testSandboxProfile "true" +testSandboxProfile "relaxed" diff --git a/tests/functional/fetchGitSubmodules.sh b/tests/functional/fetchGitSubmodules.sh index cd180815d69a..bd82a0a1755c 100644 --- a/tests/functional/fetchGitSubmodules.sh +++ b/tests/functional/fetchGitSubmodules.sh @@ -170,3 +170,45 @@ pathWithSubmodules=$(nix eval --impure --raw --expr "(builtins.fetchGit { url = [[ -e $pathWithoutExportIgnore/exclude-from-root ]] [[ -e $pathWithoutExportIgnore/sub/exclude-from-sub ]] + +test_submodule_nested() { + local repoA=$TEST_ROOT/submodule_nested/a + local repoB=$TEST_ROOT/submodule_nested/b + local repoC=$TEST_ROOT/submodule_nested/c + + rm -rf $repoA $repoB $repoC $TEST_HOME/.cache/nix + + initGitRepo $repoC + touch $repoC/inside-c + git -C $repoC add inside-c + addGitContent $repoC + + initGitRepo $repoB + git -C $repoB submodule add $repoC c + git -C $repoB add c + addGitContent $repoB + + initGitRepo $repoA + git -C $repoA submodule add $repoB b + git -C 
$repoA add b + addGitContent $repoA + + + # Check non-worktree fetch + local rev=$(git -C $repoA rev-parse HEAD) + out=$(nix eval --impure --raw --expr "(builtins.fetchGit { url = \"file://$repoA\"; rev = \"$rev\"; submodules = true; }).outPath") + test -e $out/b/c/inside-c + test -e $out/content + test -e $out/b/content + test -e $out/b/c/content + local nonWorktree=$out + + # Check worktree based fetch + # TODO: make it work without git submodule update + git -C $repoA submodule update --init --recursive + out=$(nix eval --impure --raw --expr "(builtins.fetchGit { url = \"file://$repoA\"; submodules = true; }).outPath") + find $out + [[ $out == $nonWorktree ]] || { find $out; false; } + +} +test_submodule_nested diff --git a/tests/functional/fetchMercurial.sh b/tests/functional/fetchMercurial.sh index e133df1f8a00..9f7cef7b2e6e 100644 --- a/tests/functional/fetchMercurial.sh +++ b/tests/functional/fetchMercurial.sh @@ -101,6 +101,7 @@ path4=$(nix eval --impure --refresh --raw --expr "(builtins.fetchMercurial file: [[ $path2 = $path4 ]] echo paris > $repo/hello + # Passing a `name` argument should be reflected in the output path path5=$(nix eval -vvvvv --impure --refresh --raw --expr "(builtins.fetchMercurial { url = \"file://$repo\"; name = \"foo\"; } ).outPath") [[ $path5 =~ -foo$ ]] diff --git a/tests/functional/flakes/common.sh b/tests/functional/flakes/common.sh index fc45cf7bfa31..f83a02aba732 100644 --- a/tests/functional/flakes/common.sh +++ b/tests/functional/flakes/common.sh @@ -21,6 +21,12 @@ writeSimpleFlake() { # To test "nix flake init". legacyPackages.$system.hello = import ./simple.nix; + + parent = builtins.dirOf ./.; + + baseName = builtins.baseNameOf ./.; + + root = ./.; }; } EOF diff --git a/tests/functional/flakes/flakes.sh b/tests/functional/flakes/flakes.sh index 4f41cae0aef6..35b0c5d84917 100644 --- a/tests/functional/flakes/flakes.sh +++ b/tests/functional/flakes/flakes.sh @@ -231,6 +231,17 @@ nix build -o "$TEST_ROOT/result" --expr "(builtins.getFlake \"$flake1Dir\").pack # 'getFlake' on a locked flakeref should succeed even in pure mode. nix build -o "$TEST_ROOT/result" --expr "(builtins.getFlake \"git+file://$flake1Dir?rev=$hash2\").packages.$system.default" +# Regression test for dirOf on the root of the flake. +[[ $(nix eval --json flake1#parent) = \""$NIX_STORE_DIR"\" ]] + +# Regression test for baseNameOf on the root of the flake. +[[ $(nix eval --raw flake1#baseName) =~ ^[a-z0-9]+-source$ ]] + +# Test that the root of a tree returns a path named /nix/store/--source. +# This behavior is *not* desired, but has existed for a while. +# Issue #10627 what to do about it. +[[ $(nix eval --raw flake1#root) =~ ^.*/[a-z0-9]+-[a-z0-9]+-source$ ]] + # Building a flake with an unlocked dependency should fail in pure mode. (! nix build -o "$TEST_ROOT/result" flake2#bar --no-registries) (! nix build -o "$TEST_ROOT/result" flake2#bar --no-use-registries) diff --git a/tests/functional/gc-auto.sh b/tests/functional/gc-auto.sh index 521d9e53969d..281eef20da30 100644 --- a/tests/functional/gc-auto.sh +++ b/tests/functional/gc-auto.sh @@ -62,11 +62,11 @@ EOF ) nix build --impure -v -o $TEST_ROOT/result-A -L --expr "$expr" \ - --min-free 1000 --max-free 2000 --min-free-check-interval 1 & + --min-free 1K --max-free 2K --min-free-check-interval 1 & pid1=$! nix build --impure -v -o $TEST_ROOT/result-B -L --expr "$expr2" \ - --min-free 1000 --max-free 2000 --min-free-check-interval 1 & + --min-free 1K --max-free 2K --min-free-check-interval 1 & pid2=$! 
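# Side note on the 1K/2K values used here (sketch, not part of this change):
# size settings accept binary unit suffixes, parsed by string2IntWithUnitPrefix
# as powers of 1024. The parsed byte values can be confirmed the same way the
# config.sh checks do it (the max-free line is by analogy):
#   [[ $(nix config show --min-free 1K min-free) = 1024 ]]
#   [[ $(nix config show --max-free 2K max-free) = 2048 ]]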
# Once the first build is done, unblock the second one. diff --git a/tests/functional/lang.sh b/tests/functional/lang.sh index e35795a7af00..c45326473d36 100755 --- a/tests/functional/lang.sh +++ b/tests/functional/lang.sh @@ -72,7 +72,7 @@ for i in lang/eval-fail-*.nix; do if [[ -e "lang/$i.flags" ]]; then sed -e 's/#.*//' < "lang/$i.flags" else - # note that show-trace is also set by init.sh + # note that show-trace is also set by common/init.sh echo "--eval --strict --show-trace" fi )" diff --git a/tests/functional/local-overlay-store/add-lower.sh b/tests/functional/local-overlay-store/add-lower.sh index f0ac46a91b5c..33bf20ebdd34 100755 --- a/tests/functional/local-overlay-store/add-lower.sh +++ b/tests/functional/local-overlay-store/add-lower.sh @@ -1,4 +1,5 @@ source common.sh +source ../common/init.sh requireEnvironment setupConfig diff --git a/tests/functional/local-overlay-store/bad-uris.sh b/tests/functional/local-overlay-store/bad-uris.sh index 2517681dd84c..42a6d47f756d 100644 --- a/tests/functional/local-overlay-store/bad-uris.sh +++ b/tests/functional/local-overlay-store/bad-uris.sh @@ -1,4 +1,5 @@ source common.sh +source ../common/init.sh requireEnvironment setupConfig diff --git a/tests/functional/local-overlay-store/build.sh b/tests/functional/local-overlay-store/build.sh index 758585400d86..2251be7e788d 100755 --- a/tests/functional/local-overlay-store/build.sh +++ b/tests/functional/local-overlay-store/build.sh @@ -1,4 +1,5 @@ source common.sh +source ../common/init.sh requireEnvironment setupConfig diff --git a/tests/functional/local-overlay-store/check-post-init.sh b/tests/functional/local-overlay-store/check-post-init.sh index 985bf978e5d6..e0c2602762dd 100755 --- a/tests/functional/local-overlay-store/check-post-init.sh +++ b/tests/functional/local-overlay-store/check-post-init.sh @@ -1,4 +1,5 @@ source common.sh +source ../common/init.sh requireEnvironment setupConfig diff --git a/tests/functional/local-overlay-store/common.sh b/tests/functional/local-overlay-store/common.sh index 2634f8c8f840..0e60978617f7 100644 --- a/tests/functional/local-overlay-store/common.sh +++ b/tests/functional/local-overlay-store/common.sh @@ -1,4 +1,4 @@ -source ../common.sh +source ../common/vars-and-functions.sh # The new Linux mount interface does not seem to support remounting # OverlayFS mount points. @@ -37,10 +37,9 @@ addConfig () { setupConfig () { addConfig "require-drop-supplementary-groups = false" addConfig "build-users-group = " + enableFeatures "local-overlay-store" } -enableFeatures "local-overlay-store" - setupStoreDirs () { # Attempt to create store dirs on tmpfs volume. 
# This ensures lowerdir, upperdir and workdir will be on diff --git a/tests/functional/local-overlay-store/delete-duplicate.sh b/tests/functional/local-overlay-store/delete-duplicate.sh index 0c0b1a3b2026..e3b94e1cb741 100644 --- a/tests/functional/local-overlay-store/delete-duplicate.sh +++ b/tests/functional/local-overlay-store/delete-duplicate.sh @@ -1,4 +1,5 @@ source common.sh +source ../common/init.sh requireEnvironment setupConfig diff --git a/tests/functional/local-overlay-store/delete-refs.sh b/tests/functional/local-overlay-store/delete-refs.sh index 942d7fbdc1cf..62295aaa19a3 100755 --- a/tests/functional/local-overlay-store/delete-refs.sh +++ b/tests/functional/local-overlay-store/delete-refs.sh @@ -1,4 +1,5 @@ source common.sh +source ../common/init.sh requireEnvironment setupConfig diff --git a/tests/functional/local-overlay-store/gc.sh b/tests/functional/local-overlay-store/gc.sh index 1e1fb203ea82..f3420d0b8133 100755 --- a/tests/functional/local-overlay-store/gc.sh +++ b/tests/functional/local-overlay-store/gc.sh @@ -1,4 +1,5 @@ source common.sh +source ../common/init.sh requireEnvironment setupConfig diff --git a/tests/functional/local-overlay-store/optimise.sh b/tests/functional/local-overlay-store/optimise.sh index 569afa248a91..a524a675e1cc 100755 --- a/tests/functional/local-overlay-store/optimise.sh +++ b/tests/functional/local-overlay-store/optimise.sh @@ -1,4 +1,5 @@ source common.sh +source ../common/init.sh requireEnvironment setupConfig diff --git a/tests/functional/local-overlay-store/redundant-add.sh b/tests/functional/local-overlay-store/redundant-add.sh index fbd4799e74a2..b4f04b2e1ebc 100755 --- a/tests/functional/local-overlay-store/redundant-add.sh +++ b/tests/functional/local-overlay-store/redundant-add.sh @@ -1,4 +1,5 @@ source common.sh +source ../common/init.sh requireEnvironment setupConfig diff --git a/tests/functional/local-overlay-store/stale-file-handle.sh b/tests/functional/local-overlay-store/stale-file-handle.sh index 5e75628ca471..684b8ce23d47 100755 --- a/tests/functional/local-overlay-store/stale-file-handle.sh +++ b/tests/functional/local-overlay-store/stale-file-handle.sh @@ -1,4 +1,5 @@ source common.sh +source ../common/init.sh requireEnvironment setupConfig diff --git a/tests/functional/local-overlay-store/verify.sh b/tests/functional/local-overlay-store/verify.sh index 8b44603ff834..d73d1a57d667 100755 --- a/tests/functional/local-overlay-store/verify.sh +++ b/tests/functional/local-overlay-store/verify.sh @@ -1,4 +1,5 @@ source common.sh +source ../common/init.sh requireEnvironment setupConfig diff --git a/tests/functional/local.mk b/tests/functional/local.mk index 5cc4f8e2f8ed..69e2618f2acd 100644 --- a/tests/functional/local.mk +++ b/tests/functional/local.mk @@ -1,6 +1,5 @@ nix_tests = \ test-infra.sh \ - init.sh \ flakes/flakes.sh \ flakes/develop.sh \ flakes/run.sh \ @@ -131,6 +130,7 @@ nix_tests = \ nested-sandboxing.sh \ impure-env.sh \ debugger.sh \ + extra-sandbox-profile.sh \ help.sh ifeq ($(HAVE_LIBCPUID), 1) diff --git a/tests/functional/remote-store.sh b/tests/functional/remote-store.sh index cc5dd18338ae..e2c16f18ad66 100644 --- a/tests/functional/remote-store.sh +++ b/tests/functional/remote-store.sh @@ -23,7 +23,7 @@ fi # Test import-from-derivation through the daemon. 
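# (Background, not specific to this change: "import from derivation" means the
# evaluated expression imports a file that is itself the output of a
# derivation, so evaluation has to pause and build that derivation first;
# running ./ifd.nix here checks that this build-during-evaluation round trip
# also works when the client talks to the daemon.)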
[[ $(nix eval --impure --raw --file ./ifd.nix) = hi ]] -storeCleared=1 NIX_REMOTE_=$NIX_REMOTE $SHELL ./user-envs.sh +NIX_REMOTE_=$NIX_REMOTE $SHELL ./user-envs-test-case.sh nix-store --gc --max-freed 1K diff --git a/tests/functional/user-envs-test-case.sh b/tests/functional/user-envs-test-case.sh new file mode 100644 index 000000000000..f4a90a675056 --- /dev/null +++ b/tests/functional/user-envs-test-case.sh @@ -0,0 +1,191 @@ +clearProfiles + +# Query installed: should be empty. +test "$(nix-env -p $profiles/test -q '*' | wc -l)" -eq 0 + +nix-env --switch-profile $profiles/test + +# Query available: should contain several. +test "$(nix-env -f ./user-envs.nix -qa '*' | wc -l)" -eq 6 +outPath10=$(nix-env -f ./user-envs.nix -qa --out-path --no-name '*' | grep foo-1.0) +drvPath10=$(nix-env -f ./user-envs.nix -qa --drv-path --no-name '*' | grep foo-1.0) +[ -n "$outPath10" -a -n "$drvPath10" ] + +# Query with json +nix-env -f ./user-envs.nix -qa --json | jq -e '.[] | select(.name == "bar-0.1") | [ + .outputName == "out", + .outputs.out == null +] | all' +nix-env -f ./user-envs.nix -qa --json --out-path | jq -e '.[] | select(.name == "bar-0.1") | [ + .outputName == "out", + (.outputs.out | test("'$NIX_STORE_DIR'.*-0\\.1")) +] | all' +nix-env -f ./user-envs.nix -qa --json --drv-path | jq -e '.[] | select(.name == "bar-0.1") | (.drvPath | test("'$NIX_STORE_DIR'.*-0\\.1\\.drv"))' + +# Query descriptions. +nix-env -f ./user-envs.nix -qa '*' --description | grepQuiet silly +rm -rf $HOME/.nix-defexpr +ln -s $(pwd)/user-envs.nix $HOME/.nix-defexpr +nix-env -qa '*' --description | grepQuiet silly + +# Query the system. +nix-env -qa '*' --system | grepQuiet $system + +# Install "foo-1.0". +nix-env -i foo-1.0 + +# Query installed: should contain foo-1.0 now (which should be +# executable). +test "$(nix-env -q '*' | wc -l)" -eq 1 +nix-env -q '*' | grepQuiet foo-1.0 +test "$($profiles/test/bin/foo)" = "foo-1.0" + +# Test nix-env -qc to compare installed against available packages, and vice versa. +nix-env -qc '*' | grepQuiet '< 2.0' +nix-env -qac '*' | grepQuiet '> 1.0' + +# Test the -b flag to filter out source-only packages. +[ "$(nix-env -qab | wc -l)" -eq 1 ] + +# Test the -s flag to get package status. +nix-env -qas | grepQuiet 'IP- foo-1.0' +nix-env -qas | grepQuiet -- '--- bar-0.1' + +# Disable foo. +nix-env --set-flag active false foo +(! [ -e "$profiles/test/bin/foo" ]) + +# Enable foo. +nix-env --set-flag active true foo +[ -e "$profiles/test/bin/foo" ] + +# Store the path of foo-1.0. +outPath10_=$(nix-env -q --out-path --no-name '*' | grep foo-1.0) +echo "foo-1.0 = $outPath10" +[ "$outPath10" = "$outPath10_" ] + +# Install "foo-2.0pre1": should remove foo-1.0. +nix-env -i foo-2.0pre1 + +# Query installed: should contain foo-2.0pre1 now. +test "$(nix-env -q '*' | wc -l)" -eq 1 +nix-env -q '*' | grepQuiet foo-2.0pre1 +test "$($profiles/test/bin/foo)" = "foo-2.0pre1" + +# Upgrade "foo": should install foo-2.0. +NIX_PATH=nixpkgs=./user-envs.nix:${NIX_PATH-} nix-env -f '' -u foo + +# Query installed: should contain foo-2.0 now. +test "$(nix-env -q '*' | wc -l)" -eq 1 +nix-env -q '*' | grepQuiet foo-2.0 +test "$($profiles/test/bin/foo)" = "foo-2.0" + +# Store the path of foo-2.0. +outPath20=$(nix-env -q --out-path --no-name '*' | grep foo-2.0) +test -n "$outPath20" + +# Install bar-0.1, uninstall foo. +nix-env -i bar-0.1 +nix-env -e foo + +# Query installed: should only contain bar-0.1 now. 
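# (Idiom note: `if <cmd> | grepQuiet <pattern>; then false; fi` asserts that
# the pattern does NOT appear in the command's output, while a bare
# `<cmd> | grepQuiet <pattern>` asserts that it does.)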
+if nix-env -q '*' | grepQuiet foo; then false; fi +nix-env -q '*' | grepQuiet bar + +# Rollback: should bring "foo" back. +oldGen="$(nix-store -q --resolve $profiles/test)" +nix-env --rollback +[ "$(nix-store -q --resolve $profiles/test)" != "$oldGen" ] +nix-env -q '*' | grepQuiet foo-2.0 +nix-env -q '*' | grepQuiet bar + +# Rollback again: should remove "bar". +nix-env --rollback +nix-env -q '*' | grepQuiet foo-2.0 +if nix-env -q '*' | grepQuiet bar; then false; fi + +# Count generations. +nix-env --list-generations +test "$(nix-env --list-generations | wc -l)" -eq 7 + +# Doing the same operation twice results in the same generation, which triggers +# "lazy" behaviour and does not create a new symlink. + +nix-env -i foo +nix-env -i foo + +# Count generations. +nix-env --list-generations +test "$(nix-env --list-generations | wc -l)" -eq 8 + +# Switch to a specified generation. +nix-env --switch-generation 7 +[ "$(nix-store -q --resolve $profiles/test)" = "$oldGen" ] + +# Install foo-1.0, now using its store path. +nix-env -i "$outPath10" +nix-env -q '*' | grepQuiet foo-1.0 +nix-store -qR $profiles/test | grep "$outPath10" +nix-store -q --referrers-closure $profiles/test | grep "$(nix-store -q --resolve $profiles/test)" +[ "$(nix-store -q --deriver "$outPath10")" = $drvPath10 ] + +# Uninstall foo-1.0, using a symlink to its store path. +ln -sfn $outPath10/bin/foo $TEST_ROOT/symlink +nix-env -e $TEST_ROOT/symlink +if nix-env -q '*' | grepQuiet foo; then false; fi +nix-store -qR $profiles/test | grepInverse "$outPath10" + +# Install foo-1.0, now using a symlink to its store path. +nix-env -i $TEST_ROOT/symlink +nix-env -q '*' | grepQuiet foo + +# Delete all old generations. +nix-env --delete-generations old + +# Run the garbage collector. This should get rid of foo-2.0 but not +# foo-1.0. +nix-collect-garbage +test -e "$outPath10" +(! [ -e "$outPath20" ]) + +# Uninstall everything +nix-env -e '*' +test "$(nix-env -q '*' | wc -l)" -eq 0 + +# Installing "foo" should only install the newest foo. +nix-env -i foo +test "$(nix-env -q '*' | grep foo- | wc -l)" -eq 1 +nix-env -q '*' | grepQuiet foo-2.0 + +# On the other hand, this should install both (and should fail due to +# a collision). +nix-env -e '*' +(! nix-env -i foo-1.0 foo-2.0) + +# Installing "*" should install one foo and one bar. +nix-env -e '*' +nix-env -i '*' +test "$(nix-env -q '*' | wc -l)" -eq 2 +nix-env -q '*' | grepQuiet foo-2.0 +nix-env -q '*' | grepQuiet bar-0.1.1 + +# Test priorities: foo-0.1 has a lower priority than foo-1.0, so it +# should be possible to install both without a collision. Also test +# ‘--set-flag priority’ to manually override the declared priorities. +nix-env -e '*' +nix-env -i foo-0.1 foo-1.0 +[ "$($profiles/test/bin/foo)" = "foo-1.0" ] +nix-env --set-flag priority 1 foo-0.1 +[ "$($profiles/test/bin/foo)" = "foo-0.1" ] + +# Test nix-env --set. +nix-env --set $outPath10 +[ "$(nix-store -q --resolve $profiles/test)" = $outPath10 ] +nix-env --set $drvPath10 +[ "$(nix-store -q --resolve $profiles/test)" = $outPath10 ] + +# Test the case where $HOME contains a symlink. 
+mkdir -p $TEST_ROOT/real-home/alice/.nix-defexpr/channels +ln -sfn $TEST_ROOT/real-home $TEST_ROOT/home +ln -sfn $(pwd)/user-envs.nix $TEST_ROOT/home/alice/.nix-defexpr/channels/foo +HOME=$TEST_ROOT/home/alice nix-env -i foo-0.1 diff --git a/tests/functional/user-envs.sh b/tests/functional/user-envs.sh index 7c643f3553cb..a849d5439946 100644 --- a/tests/functional/user-envs.sh +++ b/tests/functional/user-envs.sh @@ -1,197 +1,3 @@ -source common.sh +source ./common.sh -if [ -z "${storeCleared-}" ]; then - clearStore -fi - -clearProfiles - -# Query installed: should be empty. -test "$(nix-env -p $profiles/test -q '*' | wc -l)" -eq 0 - -nix-env --switch-profile $profiles/test - -# Query available: should contain several. -test "$(nix-env -f ./user-envs.nix -qa '*' | wc -l)" -eq 6 -outPath10=$(nix-env -f ./user-envs.nix -qa --out-path --no-name '*' | grep foo-1.0) -drvPath10=$(nix-env -f ./user-envs.nix -qa --drv-path --no-name '*' | grep foo-1.0) -[ -n "$outPath10" -a -n "$drvPath10" ] - -# Query with json -nix-env -f ./user-envs.nix -qa --json | jq -e '.[] | select(.name == "bar-0.1") | [ - .outputName == "out", - .outputs.out == null -] | all' -nix-env -f ./user-envs.nix -qa --json --out-path | jq -e '.[] | select(.name == "bar-0.1") | [ - .outputName == "out", - (.outputs.out | test("'$NIX_STORE_DIR'.*-0\\.1")) -] | all' -nix-env -f ./user-envs.nix -qa --json --drv-path | jq -e '.[] | select(.name == "bar-0.1") | (.drvPath | test("'$NIX_STORE_DIR'.*-0\\.1\\.drv"))' - -# Query descriptions. -nix-env -f ./user-envs.nix -qa '*' --description | grepQuiet silly -rm -rf $HOME/.nix-defexpr -ln -s $(pwd)/user-envs.nix $HOME/.nix-defexpr -nix-env -qa '*' --description | grepQuiet silly - -# Query the system. -nix-env -qa '*' --system | grepQuiet $system - -# Install "foo-1.0". -nix-env -i foo-1.0 - -# Query installed: should contain foo-1.0 now (which should be -# executable). -test "$(nix-env -q '*' | wc -l)" -eq 1 -nix-env -q '*' | grepQuiet foo-1.0 -test "$($profiles/test/bin/foo)" = "foo-1.0" - -# Test nix-env -qc to compare installed against available packages, and vice versa. -nix-env -qc '*' | grepQuiet '< 2.0' -nix-env -qac '*' | grepQuiet '> 1.0' - -# Test the -b flag to filter out source-only packages. -[ "$(nix-env -qab | wc -l)" -eq 1 ] - -# Test the -s flag to get package status. -nix-env -qas | grepQuiet 'IP- foo-1.0' -nix-env -qas | grepQuiet -- '--- bar-0.1' - -# Disable foo. -nix-env --set-flag active false foo -(! [ -e "$profiles/test/bin/foo" ]) - -# Enable foo. -nix-env --set-flag active true foo -[ -e "$profiles/test/bin/foo" ] - -# Store the path of foo-1.0. -outPath10_=$(nix-env -q --out-path --no-name '*' | grep foo-1.0) -echo "foo-1.0 = $outPath10" -[ "$outPath10" = "$outPath10_" ] - -# Install "foo-2.0pre1": should remove foo-1.0. -nix-env -i foo-2.0pre1 - -# Query installed: should contain foo-2.0pre1 now. -test "$(nix-env -q '*' | wc -l)" -eq 1 -nix-env -q '*' | grepQuiet foo-2.0pre1 -test "$($profiles/test/bin/foo)" = "foo-2.0pre1" - -# Upgrade "foo": should install foo-2.0. -NIX_PATH=nixpkgs=./user-envs.nix:${NIX_PATH-} nix-env -f '' -u foo - -# Query installed: should contain foo-2.0 now. -test "$(nix-env -q '*' | wc -l)" -eq 1 -nix-env -q '*' | grepQuiet foo-2.0 -test "$($profiles/test/bin/foo)" = "foo-2.0" - -# Store the path of foo-2.0. -outPath20=$(nix-env -q --out-path --no-name '*' | grep foo-2.0) -test -n "$outPath20" - -# Install bar-0.1, uninstall foo. -nix-env -i bar-0.1 -nix-env -e foo - -# Query installed: should only contain bar-0.1 now. 
-if nix-env -q '*' | grepQuiet foo; then false; fi -nix-env -q '*' | grepQuiet bar - -# Rollback: should bring "foo" back. -oldGen="$(nix-store -q --resolve $profiles/test)" -nix-env --rollback -[ "$(nix-store -q --resolve $profiles/test)" != "$oldGen" ] -nix-env -q '*' | grepQuiet foo-2.0 -nix-env -q '*' | grepQuiet bar - -# Rollback again: should remove "bar". -nix-env --rollback -nix-env -q '*' | grepQuiet foo-2.0 -if nix-env -q '*' | grepQuiet bar; then false; fi - -# Count generations. -nix-env --list-generations -test "$(nix-env --list-generations | wc -l)" -eq 7 - -# Doing the same operation twice results in the same generation, which triggers -# "lazy" behaviour and does not create a new symlink. - -nix-env -i foo -nix-env -i foo - -# Count generations. -nix-env --list-generations -test "$(nix-env --list-generations | wc -l)" -eq 8 - -# Switch to a specified generation. -nix-env --switch-generation 7 -[ "$(nix-store -q --resolve $profiles/test)" = "$oldGen" ] - -# Install foo-1.0, now using its store path. -nix-env -i "$outPath10" -nix-env -q '*' | grepQuiet foo-1.0 -nix-store -qR $profiles/test | grep "$outPath10" -nix-store -q --referrers-closure $profiles/test | grep "$(nix-store -q --resolve $profiles/test)" -[ "$(nix-store -q --deriver "$outPath10")" = $drvPath10 ] - -# Uninstall foo-1.0, using a symlink to its store path. -ln -sfn $outPath10/bin/foo $TEST_ROOT/symlink -nix-env -e $TEST_ROOT/symlink -if nix-env -q '*' | grepQuiet foo; then false; fi -nix-store -qR $profiles/test | grepInverse "$outPath10" - -# Install foo-1.0, now using a symlink to its store path. -nix-env -i $TEST_ROOT/symlink -nix-env -q '*' | grepQuiet foo - -# Delete all old generations. -nix-env --delete-generations old - -# Run the garbage collector. This should get rid of foo-2.0 but not -# foo-1.0. -nix-collect-garbage -test -e "$outPath10" -(! [ -e "$outPath20" ]) - -# Uninstall everything -nix-env -e '*' -test "$(nix-env -q '*' | wc -l)" -eq 0 - -# Installing "foo" should only install the newest foo. -nix-env -i foo -test "$(nix-env -q '*' | grep foo- | wc -l)" -eq 1 -nix-env -q '*' | grepQuiet foo-2.0 - -# On the other hand, this should install both (and should fail due to -# a collision). -nix-env -e '*' -(! nix-env -i foo-1.0 foo-2.0) - -# Installing "*" should install one foo and one bar. -nix-env -e '*' -nix-env -i '*' -test "$(nix-env -q '*' | wc -l)" -eq 2 -nix-env -q '*' | grepQuiet foo-2.0 -nix-env -q '*' | grepQuiet bar-0.1.1 - -# Test priorities: foo-0.1 has a lower priority than foo-1.0, so it -# should be possible to install both without a collision. Also test -# ‘--set-flag priority’ to manually override the declared priorities. -nix-env -e '*' -nix-env -i foo-0.1 foo-1.0 -[ "$($profiles/test/bin/foo)" = "foo-1.0" ] -nix-env --set-flag priority 1 foo-0.1 -[ "$($profiles/test/bin/foo)" = "foo-0.1" ] - -# Test nix-env --set. -nix-env --set $outPath10 -[ "$(nix-store -q --resolve $profiles/test)" = $outPath10 ] -nix-env --set $drvPath10 -[ "$(nix-store -q --resolve $profiles/test)" = $outPath10 ] - -# Test the case where $HOME contains a symlink. 
-mkdir -p $TEST_ROOT/real-home/alice/.nix-defexpr/channels -ln -sfn $TEST_ROOT/real-home $TEST_ROOT/home -ln -sfn $(pwd)/user-envs.nix $TEST_ROOT/home/alice/.nix-defexpr/channels/foo -HOME=$TEST_ROOT/home/alice nix-env -i foo-0.1 +source ./user-envs-test-case.sh diff --git a/tests/nixos/containers/containers.nix b/tests/nixos/containers/containers.nix index c8ee78a4a58e..6773f5628a3a 100644 --- a/tests/nixos/containers/containers.nix +++ b/tests/nixos/containers/containers.nix @@ -33,30 +33,30 @@ # Test that 'id' gives the expected result in various configurations. # Existing UIDs, sandbox. - host.succeed("nix build -v --no-auto-allocate-uids --sandbox -L --offline --impure --file ${./id-test.nix} --argstr name id-test-1") + host.succeed("nix build --no-auto-allocate-uids --sandbox -L --offline --impure --file ${./id-test.nix} --argstr name id-test-1") host.succeed("[[ $(cat ./result) = 'uid=1000(nixbld) gid=100(nixbld) groups=100(nixbld)' ]]") # Existing UIDs, no sandbox. - host.succeed("nix build -v --no-auto-allocate-uids --no-sandbox -L --offline --impure --file ${./id-test.nix} --argstr name id-test-2") + host.succeed("nix build --no-auto-allocate-uids --no-sandbox -L --offline --impure --file ${./id-test.nix} --argstr name id-test-2") host.succeed("[[ $(cat ./result) = 'uid=30001(nixbld1) gid=30000(nixbld) groups=30000(nixbld)' ]]") # Auto-allocated UIDs, sandbox. - host.succeed("nix build -v --auto-allocate-uids --sandbox -L --offline --impure --file ${./id-test.nix} --argstr name id-test-3") + host.succeed("nix build --auto-allocate-uids --sandbox -L --offline --impure --file ${./id-test.nix} --argstr name id-test-3") host.succeed("[[ $(cat ./result) = 'uid=1000(nixbld) gid=100(nixbld) groups=100(nixbld)' ]]") # Auto-allocated UIDs, no sandbox. - host.succeed("nix build -v --auto-allocate-uids --no-sandbox -L --offline --impure --file ${./id-test.nix} --argstr name id-test-4") + host.succeed("nix build --auto-allocate-uids --no-sandbox -L --offline --impure --file ${./id-test.nix} --argstr name id-test-4") host.succeed("[[ $(cat ./result) = 'uid=872415232 gid=30000(nixbld) groups=30000(nixbld)' ]]") # Auto-allocated UIDs, UID range, sandbox. - host.succeed("nix build -v --auto-allocate-uids --sandbox -L --offline --impure --file ${./id-test.nix} --argstr name id-test-5 --arg uidRange true") + host.succeed("nix build --auto-allocate-uids --sandbox -L --offline --impure --file ${./id-test.nix} --argstr name id-test-5 --arg uidRange true") host.succeed("[[ $(cat ./result) = 'uid=0(root) gid=0(root) groups=0(root)' ]]") # Auto-allocated UIDs, UID range, no sandbox. - host.fail("nix build -v --auto-allocate-uids --no-sandbox -L --offline --impure --file ${./id-test.nix} --argstr name id-test-6 --arg uidRange true") + host.fail("nix build --auto-allocate-uids --no-sandbox -L --offline --impure --file ${./id-test.nix} --argstr name id-test-6 --arg uidRange true") # Run systemd-nspawn in a Nix build. 
- host.succeed("nix build -v --auto-allocate-uids --sandbox -L --offline --impure --file ${./systemd-nspawn.nix} --argstr nixpkgs ${nixpkgs}") + host.succeed("nix build --auto-allocate-uids --sandbox -L --offline --impure --file ${./systemd-nspawn.nix} --argstr nixpkgs ${nixpkgs}") host.succeed("[[ $(cat ./result/msg) = 'Hello World' ]]") ''; diff --git a/tests/nixos/nss-preload.nix b/tests/nixos/nss-preload.nix index 00505d114211..610769c8df52 100644 --- a/tests/nixos/nss-preload.nix +++ b/tests/nixos/nss-preload.nix @@ -32,6 +32,7 @@ let impureEnvVars = [ "http_proxy" "https_proxy" "ftp_proxy" "all_proxy" "no_proxy" + "HTTP_PROXY" "HTTPS_PROXY" "FTP_PROXY" "ALL_PROXY" "NO_PROXY" ]; urls = [ "http://example.com" ]; diff --git a/tests/unit/libexpr/error_traces.cc b/tests/unit/libexpr/error_traces.cc index 7b32b320bac5..be379a909934 100644 --- a/tests/unit/libexpr/error_traces.cc +++ b/tests/unit/libexpr/error_traces.cc @@ -102,6 +102,74 @@ namespace nix { , type \ ) +#define ASSERT_TRACE3(args, type, message, context1, context2) \ + ASSERT_THROW( \ + std::string expr(args); \ + std::string name = expr.substr(0, expr.find(" ")); \ + try { \ + Value v = eval("builtins." args); \ + state.forceValueDeep(v); \ + } catch (BaseError & e) { \ + ASSERT_EQ(PrintToString(e.info().msg), \ + PrintToString(message)); \ + ASSERT_EQ(e.info().traces.size(), 3) << "while testing " args << std::endl << e.what(); \ + auto trace = e.info().traces.rbegin(); \ + ASSERT_EQ(PrintToString(trace->hint), \ + PrintToString(context1)); \ + ++trace; \ + ASSERT_EQ(PrintToString(trace->hint), \ + PrintToString(context2)); \ + ++trace; \ + ASSERT_EQ(PrintToString(trace->hint), \ + PrintToString(HintFmt("while calling the '%s' builtin", name))); \ + throw; \ + } \ + , type \ + ) + +#define ASSERT_TRACE4(args, type, message, context1, context2, context3) \ + ASSERT_THROW( \ + std::string expr(args); \ + std::string name = expr.substr(0, expr.find(" ")); \ + try { \ + Value v = eval("builtins." args); \ + state.forceValueDeep(v); \ + } catch (BaseError & e) { \ + ASSERT_EQ(PrintToString(e.info().msg), \ + PrintToString(message)); \ + ASSERT_EQ(e.info().traces.size(), 4) << "while testing " args << std::endl << e.what(); \ + auto trace = e.info().traces.rbegin(); \ + ASSERT_EQ(PrintToString(trace->hint), \ + PrintToString(context1)); \ + ++trace; \ + ASSERT_EQ(PrintToString(trace->hint), \ + PrintToString(context2)); \ + ++trace; \ + ASSERT_EQ(PrintToString(trace->hint), \ + PrintToString(context3)); \ + ++trace; \ + ASSERT_EQ(PrintToString(trace->hint), \ + PrintToString(HintFmt("while calling the '%s' builtin", name))); \ + throw; \ + } \ + , type \ + ) + +// We assume that expr starts with "builtins.derivationStrict { name =", +// otherwise the name attribute position (1, 29) would be invalid. +#define DERIVATION_TRACE_HINTFMT(name) \ + HintFmt("while evaluating derivation '%s'\n" \ + " whose name attribute is located at %s", \ + name, Pos(1, 29, Pos::String{.source = make_ref(expr)})) + +// To keep things simple, we also assume that derivation name is "foo". 
+#define ASSERT_DERIVATION_TRACE1(args, type, message) \ + ASSERT_TRACE2(args, type, message, DERIVATION_TRACE_HINTFMT("foo")) +#define ASSERT_DERIVATION_TRACE2(args, type, message, context) \ + ASSERT_TRACE3(args, type, message, context, DERIVATION_TRACE_HINTFMT("foo")) +#define ASSERT_DERIVATION_TRACE3(args, type, message, context1, context2) \ + ASSERT_TRACE4(args, type, message, context1, context2, DERIVATION_TRACE_HINTFMT("foo")) + TEST_F(ErrorTraceTest, genericClosure) { ASSERT_TRACE2("genericClosure 1", TypeError, @@ -1185,7 +1253,6 @@ namespace nix { } - /* // Needs different ASSERTs TEST_F(ErrorTraceTest, derivationStrict) { ASSERT_TRACE2("derivationStrict \"\"", TypeError, @@ -1197,102 +1264,115 @@ namespace nix { HintFmt("attribute '%s' missing", "name"), HintFmt("in the attrset passed as argument to builtins.derivationStrict")); - ASSERT_TRACE2("derivationStrict { name = 1; }", + ASSERT_TRACE3("derivationStrict { name = 1; }", TypeError, - HintFmt("expected a string but found %s: %s", "an integer", "1"), - HintFmt("while evaluating the `name` attribute passed to builtins.derivationStrict")); + HintFmt("expected a string but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the `name` attribute passed to builtins.derivationStrict"), + HintFmt("while evaluating the derivation attribute 'name'")); - ASSERT_TRACE2("derivationStrict { name = \"foo\"; }", - TypeError, - HintFmt("required attribute 'builder' missing"), - HintFmt("while evaluating derivation 'foo'")); + ASSERT_DERIVATION_TRACE1("derivationStrict { name = \"foo\"; }", + EvalError, + HintFmt("required attribute 'builder' missing")); - ASSERT_TRACE2("derivationStrict { name = \"foo\"; builder = 1; __structuredAttrs = 15; }", + ASSERT_DERIVATION_TRACE2("derivationStrict { name = \"foo\"; builder = 1; __structuredAttrs = 15; }", TypeError, - HintFmt("expected a Boolean but found %s: %s", "an integer", "15"), + HintFmt("expected a Boolean but found %s: %s", "an integer", Uncolored(ANSI_CYAN "15" ANSI_NORMAL)), HintFmt("while evaluating the `__structuredAttrs` attribute passed to builtins.derivationStrict")); - ASSERT_TRACE2("derivationStrict { name = \"foo\"; builder = 1; __ignoreNulls = 15; }", + ASSERT_DERIVATION_TRACE2("derivationStrict { name = \"foo\"; builder = 1; __ignoreNulls = 15; }", TypeError, - HintFmt("expected a Boolean but found %s: %s", "an integer", "15"), + HintFmt("expected a Boolean but found %s: %s", "an integer", Uncolored(ANSI_CYAN "15" ANSI_NORMAL)), HintFmt("while evaluating the `__ignoreNulls` attribute passed to builtins.derivationStrict")); - ASSERT_TRACE2("derivationStrict { name = \"foo\"; builder = 1; outputHashMode = 15; }", - TypeError, - HintFmt("invalid value '15' for 'outputHashMode' attribute"), - HintFmt("while evaluating the attribute 'outputHashMode' of derivation 'foo'")); + ASSERT_DERIVATION_TRACE2("derivationStrict { name = \"foo\"; builder = 1; outputHashMode = 15; }", + EvalError, + HintFmt("invalid value '%s' for 'outputHashMode' attribute", "15"), + HintFmt("while evaluating attribute '%s' of derivation '%s'", "outputHashMode", "foo")); - ASSERT_TRACE2("derivationStrict { name = \"foo\"; builder = 1; outputHashMode = \"custom\"; }", - TypeError, - HintFmt("invalid value 'custom' for 'outputHashMode' attribute"), - HintFmt("while evaluating the attribute 'outputHashMode' of derivation 'foo'")); + ASSERT_DERIVATION_TRACE2("derivationStrict { name = \"foo\"; builder = 1; outputHashMode = \"custom\"; }", + EvalError, + HintFmt("invalid 
value '%s' for 'outputHashMode' attribute", "custom"), + HintFmt("while evaluating attribute '%s' of derivation '%s'", "outputHashMode", "foo")); - ASSERT_TRACE2("derivationStrict { name = \"foo\"; builder = 1; system = {}; }", + ASSERT_DERIVATION_TRACE3("derivationStrict { name = \"foo\"; builder = 1; system = {}; }", TypeError, - HintFmt("cannot coerce %s to a string: %s", "a set", "{ }"), - HintFmt("while evaluating the attribute 'system' of derivation 'foo'")); + HintFmt("cannot coerce %s to a string: { }", "a set"), + HintFmt(""), + HintFmt("while evaluating attribute '%s' of derivation '%s'", "system", "foo")); - ASSERT_TRACE2("derivationStrict { name = \"foo\"; builder = 1; system = 1; outputs = {}; }", + ASSERT_DERIVATION_TRACE3("derivationStrict { name = \"foo\"; builder = 1; system = 1; outputs = {}; }", TypeError, - HintFmt("cannot coerce %s to a string: %s", "a set", "{ }"), - HintFmt("while evaluating the attribute 'outputs' of derivation 'foo'")); + HintFmt("cannot coerce %s to a string: { }", "a set"), + HintFmt(""), + HintFmt("while evaluating attribute '%s' of derivation '%s'", "outputs", "foo")); - ASSERT_TRACE2("derivationStrict { name = \"foo\"; builder = 1; system = 1; outputs = \"drv\"; }", - TypeError, - HintFmt("invalid derivation output name 'drv'"), - HintFmt("while evaluating the attribute 'outputs' of derivation 'foo'")); + ASSERT_DERIVATION_TRACE2("derivationStrict { name = \"foo\"; builder = 1; system = 1; outputs = \"drvPath\"; }", + EvalError, + HintFmt("invalid derivation output name 'drvPath'"), + HintFmt("while evaluating attribute '%s' of derivation '%s'", "outputs", "foo")); - ASSERT_TRACE2("derivationStrict { name = \"foo\"; builder = 1; system = 1; outputs = []; }", - TypeError, + ASSERT_DERIVATION_TRACE3("derivationStrict { name = \"foo\"; outputs = \"out\"; __structuredAttrs = true; }", + EvalError, + HintFmt("expected a list but found %s: %s", "a string", "\"out\""), + HintFmt(""), + HintFmt("while evaluating attribute '%s' of derivation '%s'", "outputs", "foo")); + + ASSERT_DERIVATION_TRACE2("derivationStrict { name = \"foo\"; builder = 1; system = 1; outputs = []; }", + EvalError, HintFmt("derivation cannot have an empty set of outputs"), - HintFmt("while evaluating the attribute 'outputs' of derivation 'foo'")); + HintFmt("while evaluating attribute '%s' of derivation '%s'", "outputs", "foo")); - ASSERT_TRACE2("derivationStrict { name = \"foo\"; builder = 1; system = 1; outputs = [ \"drv\" ]; }", - TypeError, - HintFmt("invalid derivation output name 'drv'"), - HintFmt("while evaluating the attribute 'outputs' of derivation 'foo'")); + ASSERT_DERIVATION_TRACE2("derivationStrict { name = \"foo\"; builder = 1; system = 1; outputs = [ \"drvPath\" ]; }", + EvalError, + HintFmt("invalid derivation output name 'drvPath'"), + HintFmt("while evaluating attribute '%s' of derivation '%s'", "outputs", "foo")); - ASSERT_TRACE2("derivationStrict { name = \"foo\"; builder = 1; system = 1; outputs = [ \"out\" \"out\" ]; }", - TypeError, - HintFmt("duplicate derivation output 'out'"), - HintFmt("while evaluating the attribute 'outputs' of derivation 'foo'")); + ASSERT_DERIVATION_TRACE2("derivationStrict { name = \"foo\"; builder = 1; system = 1; outputs = [ \"out\" \"out\" ]; }", + EvalError, + HintFmt("duplicate derivation output '%s'", "out"), + HintFmt("while evaluating attribute '%s' of derivation '%s'", "outputs", "foo")); - ASSERT_TRACE2("derivationStrict { name = \"foo\"; builder = 1; system = 1; outputs = \"out\"; __contentAddressed = \"true\"; }", + 
ASSERT_DERIVATION_TRACE3("derivationStrict { name = \"foo\"; builder = 1; system = 1; outputs = \"out\"; __contentAddressed = \"true\"; }", TypeError, HintFmt("expected a Boolean but found %s: %s", "a string", "\"true\""), - HintFmt("while evaluating the attribute '__contentAddressed' of derivation 'foo'")); + HintFmt(""), + HintFmt("while evaluating attribute '%s' of derivation '%s'", "__contentAddressed", "foo")); - ASSERT_TRACE2("derivationStrict { name = \"foo\"; builder = 1; system = 1; outputs = \"out\"; __impure = \"true\"; }", + ASSERT_DERIVATION_TRACE3("derivationStrict { name = \"foo\"; builder = 1; system = 1; outputs = \"out\"; __impure = \"true\"; }", TypeError, HintFmt("expected a Boolean but found %s: %s", "a string", "\"true\""), - HintFmt("while evaluating the attribute '__impure' of derivation 'foo'")); + HintFmt(""), + HintFmt("while evaluating attribute '%s' of derivation '%s'", "__impure", "foo")); - ASSERT_TRACE2("derivationStrict { name = \"foo\"; builder = 1; system = 1; outputs = \"out\"; __impure = \"true\"; }", + ASSERT_DERIVATION_TRACE3("derivationStrict { name = \"foo\"; builder = 1; system = 1; outputs = \"out\"; __impure = \"true\"; }", TypeError, HintFmt("expected a Boolean but found %s: %s", "a string", "\"true\""), - HintFmt("while evaluating the attribute '__impure' of derivation 'foo'")); + HintFmt(""), + HintFmt("while evaluating attribute '%s' of derivation '%s'", "__impure", "foo")); - ASSERT_TRACE2("derivationStrict { name = \"foo\"; builder = 1; system = 1; outputs = \"out\"; args = \"foo\"; }", + ASSERT_DERIVATION_TRACE3("derivationStrict { name = \"foo\"; builder = 1; system = 1; outputs = \"out\"; args = \"foo\"; }", TypeError, HintFmt("expected a list but found %s: %s", "a string", "\"foo\""), - HintFmt("while evaluating the attribute 'args' of derivation 'foo'")); + HintFmt(""), + HintFmt("while evaluating attribute '%s' of derivation '%s'", "args", "foo")); - ASSERT_TRACE2("derivationStrict { name = \"foo\"; builder = 1; system = 1; outputs = \"out\"; args = [ {} ]; }", + ASSERT_DERIVATION_TRACE3("derivationStrict { name = \"foo\"; builder = 1; system = 1; outputs = \"out\"; args = [ {} ]; }", TypeError, - HintFmt("cannot coerce %s to a string: %s", "a set", "{ }"), - HintFmt("while evaluating an element of the argument list")); + HintFmt("cannot coerce %s to a string: { }", "a set"), + HintFmt("while evaluating an element of the argument list"), + HintFmt("while evaluating attribute '%s' of derivation '%s'", "args", "foo")); - ASSERT_TRACE2("derivationStrict { name = \"foo\"; builder = 1; system = 1; outputs = \"out\"; args = [ \"a\" {} ]; }", + ASSERT_DERIVATION_TRACE3("derivationStrict { name = \"foo\"; builder = 1; system = 1; outputs = \"out\"; args = [ \"a\" {} ]; }", TypeError, - HintFmt("cannot coerce %s to a string: %s", "a set", "{ }"), - HintFmt("while evaluating an element of the argument list")); + HintFmt("cannot coerce %s to a string: { }", "a set"), + HintFmt("while evaluating an element of the argument list"), + HintFmt("while evaluating attribute '%s' of derivation '%s'", "args", "foo")); - ASSERT_TRACE2("derivationStrict { name = \"foo\"; builder = 1; system = 1; outputs = \"out\"; FOO = {}; }", + ASSERT_DERIVATION_TRACE3("derivationStrict { name = \"foo\"; builder = 1; system = 1; outputs = \"out\"; FOO = {}; }", TypeError, - HintFmt("cannot coerce %s to a string: %s", "a set", "{ }"), - HintFmt("while evaluating the attribute 'FOO' of derivation 'foo'")); - + HintFmt("cannot coerce %s to a string: { }", "a set"), + 
HintFmt(""), + HintFmt("while evaluating attribute '%s' of derivation '%s'", "FOO", "foo")); } - */ } /* namespace nix */ diff --git a/tests/unit/libexpr/nix_api_value.cc b/tests/unit/libexpr/nix_api_value.cc index ac0cdb9c449e..6e1131e10f80 100644 --- a/tests/unit/libexpr/nix_api_value.cc +++ b/tests/unit/libexpr/nix_api_value.cc @@ -14,11 +14,16 @@ namespace nixC { -TEST_F(nix_api_expr_test, nix_value_set_get_int) +TEST_F(nix_api_expr_test, nix_value_get_int_invalid) { ASSERT_EQ(0, nix_get_int(ctx, nullptr)); - ASSERT_DEATH(nix_get_int(ctx, value), ""); + assert_ctx_err(); + ASSERT_EQ(0, nix_get_int(ctx, value)); + assert_ctx_err(); +} +TEST_F(nix_api_expr_test, nix_value_set_get_int) +{ int myInt = 1; nix_init_int(ctx, value, myInt); @@ -27,24 +32,34 @@ TEST_F(nix_api_expr_test, nix_value_set_get_int) ASSERT_EQ(NIX_TYPE_INT, nix_get_type(ctx, value)); } -TEST_F(nix_api_expr_test, nix_value_set_get_float) +TEST_F(nix_api_expr_test, nix_value_set_get_float_invalid) { - ASSERT_FLOAT_EQ(0.0, nix_get_float(ctx, nullptr)); - ASSERT_DEATH(nix_get_float(ctx, value), ""); + ASSERT_DOUBLE_EQ(0.0, nix_get_float(ctx, nullptr)); + assert_ctx_err(); + ASSERT_DOUBLE_EQ(0.0, nix_get_float(ctx, value)); + assert_ctx_err(); +} - float myDouble = 1.0; +TEST_F(nix_api_expr_test, nix_value_set_get_float) +{ + double myDouble = 1.0; nix_init_float(ctx, value, myDouble); - ASSERT_FLOAT_EQ(myDouble, nix_get_float(ctx, value)); + ASSERT_DOUBLE_EQ(myDouble, nix_get_float(ctx, value)); ASSERT_STREQ("a float", nix_get_typename(ctx, value)); ASSERT_EQ(NIX_TYPE_FLOAT, nix_get_type(ctx, value)); } -TEST_F(nix_api_expr_test, nix_value_set_get_bool) +TEST_F(nix_api_expr_test, nix_value_set_get_bool_invalid) { ASSERT_EQ(false, nix_get_bool(ctx, nullptr)); - ASSERT_DEATH(nix_get_bool(ctx, value), ""); + assert_ctx_err(); + ASSERT_EQ(false, nix_get_bool(ctx, value)); + assert_ctx_err(); +} +TEST_F(nix_api_expr_test, nix_value_set_get_bool) +{ bool myBool = true; nix_init_bool(ctx, value, myBool); @@ -53,12 +68,18 @@ TEST_F(nix_api_expr_test, nix_value_set_get_bool) ASSERT_EQ(NIX_TYPE_BOOL, nix_get_type(ctx, value)); } -TEST_F(nix_api_expr_test, nix_value_set_get_string) +TEST_F(nix_api_expr_test, nix_value_set_get_string_invalid) { std::string string_value; ASSERT_EQ(NIX_ERR_UNKNOWN, nix_get_string(ctx, nullptr, OBSERVE_STRING(string_value))); - ASSERT_DEATH(nix_get_string(ctx, value, OBSERVE_STRING(string_value)), ""); + assert_ctx_err(); + ASSERT_EQ(NIX_ERR_UNKNOWN, nix_get_string(ctx, value, OBSERVE_STRING(string_value))); + assert_ctx_err(); +} +TEST_F(nix_api_expr_test, nix_value_set_get_string) +{ + std::string string_value; const char * myString = "some string"; nix_init_string(ctx, value, myString); @@ -68,21 +89,29 @@ TEST_F(nix_api_expr_test, nix_value_set_get_string) ASSERT_EQ(NIX_TYPE_STRING, nix_get_type(ctx, value)); } -TEST_F(nix_api_expr_test, nix_value_set_get_null) +TEST_F(nix_api_expr_test, nix_value_set_get_null_invalid) { - ASSERT_DEATH(nix_get_typename(ctx, value), ""); + ASSERT_EQ(NULL, nix_get_typename(ctx, value)); + assert_ctx_err(); +} +TEST_F(nix_api_expr_test, nix_value_set_get_null) +{ nix_init_null(ctx, value); ASSERT_STREQ("null", nix_get_typename(ctx, value)); ASSERT_EQ(NIX_TYPE_NULL, nix_get_type(ctx, value)); } -TEST_F(nix_api_expr_test, nix_value_set_get_path) +TEST_F(nix_api_expr_test, nix_value_set_get_path_invalid) { ASSERT_EQ(nullptr, nix_get_path_string(ctx, nullptr)); - ASSERT_DEATH(nix_get_path_string(ctx, value), ""); - + assert_ctx_err(); + ASSERT_EQ(nullptr, 
nix_get_path_string(ctx, value)); + assert_ctx_err(); +} +TEST_F(nix_api_expr_test, nix_value_set_get_path) +{ const char * p = "/nix/store/40s0qmrfb45vlh6610rk29ym318dswdr-myname"; nix_init_path_string(ctx, state, value, p); @@ -91,25 +120,39 @@ TEST_F(nix_api_expr_test, nix_value_set_get_path) ASSERT_EQ(NIX_TYPE_PATH, nix_get_type(ctx, value)); } -TEST_F(nix_api_expr_test, nix_build_and_init_list) +TEST_F(nix_api_expr_test, nix_build_and_init_list_invalid) { ASSERT_EQ(nullptr, nix_get_list_byidx(ctx, nullptr, state, 0)); + assert_ctx_err(); ASSERT_EQ(0, nix_get_list_size(ctx, nullptr)); + assert_ctx_err(); - ASSERT_DEATH(nix_get_list_byidx(ctx, value, state, 0), ""); - ASSERT_DEATH(nix_get_list_size(ctx, value), ""); + ASSERT_EQ(nullptr, nix_get_list_byidx(ctx, value, state, 0)); + assert_ctx_err(); + ASSERT_EQ(0, nix_get_list_size(ctx, value)); + assert_ctx_err(); +} +TEST_F(nix_api_expr_test, nix_build_and_init_list) +{ int size = 10; ListBuilder * builder = nix_make_list_builder(ctx, state, size); Value * intValue = nix_alloc_value(ctx, state); + Value * intValue2 = nix_alloc_value(ctx, state); + + // `init` and `insert` can be called in any order nix_init_int(ctx, intValue, 42); nix_list_builder_insert(ctx, builder, 0, intValue); + nix_list_builder_insert(ctx, builder, 1, intValue2); + nix_init_int(ctx, intValue2, 43); + nix_make_list(ctx, builder, value); nix_list_builder_free(builder); ASSERT_EQ(42, nix_get_int(ctx, nix_get_list_byidx(ctx, value, state, 0))); - ASSERT_EQ(nullptr, nix_get_list_byidx(ctx, value, state, 1)); + ASSERT_EQ(43, nix_get_int(ctx, nix_get_list_byidx(ctx, value, state, 1))); + ASSERT_EQ(nullptr, nix_get_list_byidx(ctx, value, state, 2)); ASSERT_EQ(10, nix_get_list_size(ctx, value)); ASSERT_STREQ("a list", nix_get_typename(ctx, value)); @@ -119,20 +162,33 @@ TEST_F(nix_api_expr_test, nix_build_and_init_list) nix_gc_decref(ctx, intValue); } -TEST_F(nix_api_expr_test, nix_build_and_init_attr) +TEST_F(nix_api_expr_test, nix_build_and_init_attr_invalid) { ASSERT_EQ(nullptr, nix_get_attr_byname(ctx, nullptr, state, 0)); + assert_ctx_err(); ASSERT_EQ(nullptr, nix_get_attr_byidx(ctx, nullptr, state, 0, nullptr)); + assert_ctx_err(); ASSERT_EQ(nullptr, nix_get_attr_name_byidx(ctx, nullptr, state, 0)); + assert_ctx_err(); ASSERT_EQ(0, nix_get_attrs_size(ctx, nullptr)); + assert_ctx_err(); ASSERT_EQ(false, nix_has_attr_byname(ctx, nullptr, state, "no-value")); + assert_ctx_err(); + + ASSERT_EQ(nullptr, nix_get_attr_byname(ctx, value, state, 0)); + assert_ctx_err(); + ASSERT_EQ(nullptr, nix_get_attr_byidx(ctx, value, state, 0, nullptr)); + assert_ctx_err(); + ASSERT_EQ(nullptr, nix_get_attr_name_byidx(ctx, value, state, 0)); + assert_ctx_err(); + ASSERT_EQ(0, nix_get_attrs_size(ctx, value)); + assert_ctx_err(); + ASSERT_EQ(false, nix_has_attr_byname(ctx, value, state, "no-value")); + assert_ctx_err(); +} - ASSERT_DEATH(nix_get_attr_byname(ctx, value, state, 0), ""); - ASSERT_DEATH(nix_get_attr_byidx(ctx, value, state, 0, nullptr), ""); - ASSERT_DEATH(nix_get_attr_name_byidx(ctx, value, state, 0), ""); - ASSERT_DEATH(nix_get_attrs_size(ctx, value), ""); - ASSERT_DEATH(nix_has_attr_byname(ctx, value, state, "no-value"), ""); - +TEST_F(nix_api_expr_test, nix_build_and_init_attr) +{ int size = 10; const char ** out_name = (const char **) malloc(sizeof(char *)); @@ -311,4 +367,17 @@ TEST_F(nix_api_expr_test, nix_value_init_apply_lazy_arg) nix_gc_decref(ctx, e); } +TEST_F(nix_api_expr_test, nix_copy_value) +{ + Value * source = nix_alloc_value(ctx, state); + + 
nix_init_int(ctx, source, 42); + nix_copy_value(ctx, value, source); + + ASSERT_EQ(42, nix_get_int(ctx, value)); + + // Clean up + nix_gc_decref(ctx, source); +} + } diff --git a/tests/unit/libexpr/primops.cc b/tests/unit/libexpr/primops.cc index 5ddc031f73a7..5b5898237980 100644 --- a/tests/unit/libexpr/primops.cc +++ b/tests/unit/libexpr/primops.cc @@ -2,7 +2,7 @@ #include #include "eval-settings.hh" -#include "memory-input-accessor.hh" +#include "memory-source-accessor.hh" #include "tests/libexpr.hh" diff --git a/tests/unit/libexpr/search-path.cc b/tests/unit/libexpr/search-path.cc index dbe7ab95fb57..0806793557d2 100644 --- a/tests/unit/libexpr/search-path.cc +++ b/tests/unit/libexpr/search-path.cc @@ -5,85 +5,85 @@ namespace nix { -TEST(SearchPathElem, parse_justPath) { +TEST(LookupPathElem, parse_justPath) { ASSERT_EQ( - SearchPath::Elem::parse("foo"), - (SearchPath::Elem { - .prefix = SearchPath::Prefix { .s = "" }, - .path = SearchPath::Path { .s = "foo" }, + LookupPath::Elem::parse("foo"), + (LookupPath::Elem { + .prefix = LookupPath::Prefix { .s = "" }, + .path = LookupPath::Path { .s = "foo" }, })); } -TEST(SearchPathElem, parse_emptyPrefix) { +TEST(LookupPathElem, parse_emptyPrefix) { ASSERT_EQ( - SearchPath::Elem::parse("=foo"), - (SearchPath::Elem { - .prefix = SearchPath::Prefix { .s = "" }, - .path = SearchPath::Path { .s = "foo" }, + LookupPath::Elem::parse("=foo"), + (LookupPath::Elem { + .prefix = LookupPath::Prefix { .s = "" }, + .path = LookupPath::Path { .s = "foo" }, })); } -TEST(SearchPathElem, parse_oneEq) { +TEST(LookupPathElem, parse_oneEq) { ASSERT_EQ( - SearchPath::Elem::parse("foo=bar"), - (SearchPath::Elem { - .prefix = SearchPath::Prefix { .s = "foo" }, - .path = SearchPath::Path { .s = "bar" }, + LookupPath::Elem::parse("foo=bar"), + (LookupPath::Elem { + .prefix = LookupPath::Prefix { .s = "foo" }, + .path = LookupPath::Path { .s = "bar" }, })); } -TEST(SearchPathElem, parse_twoEqs) { +TEST(LookupPathElem, parse_twoEqs) { ASSERT_EQ( - SearchPath::Elem::parse("foo=bar=baz"), - (SearchPath::Elem { - .prefix = SearchPath::Prefix { .s = "foo" }, - .path = SearchPath::Path { .s = "bar=baz" }, + LookupPath::Elem::parse("foo=bar=baz"), + (LookupPath::Elem { + .prefix = LookupPath::Prefix { .s = "foo" }, + .path = LookupPath::Path { .s = "bar=baz" }, })); } -TEST(SearchPathElem, suffixIfPotentialMatch_justPath) { - SearchPath::Prefix prefix { .s = "" }; +TEST(LookupPathElem, suffixIfPotentialMatch_justPath) { + LookupPath::Prefix prefix { .s = "" }; ASSERT_EQ(prefix.suffixIfPotentialMatch("any/thing"), std::optional { "any/thing" }); } -TEST(SearchPathElem, suffixIfPotentialMatch_misleadingPrefix1) { - SearchPath::Prefix prefix { .s = "foo" }; +TEST(LookupPathElem, suffixIfPotentialMatch_misleadingPrefix1) { + LookupPath::Prefix prefix { .s = "foo" }; ASSERT_EQ(prefix.suffixIfPotentialMatch("fooX"), std::nullopt); } -TEST(SearchPathElem, suffixIfPotentialMatch_misleadingPrefix2) { - SearchPath::Prefix prefix { .s = "foo" }; +TEST(LookupPathElem, suffixIfPotentialMatch_misleadingPrefix2) { + LookupPath::Prefix prefix { .s = "foo" }; ASSERT_EQ(prefix.suffixIfPotentialMatch("fooX/bar"), std::nullopt); } -TEST(SearchPathElem, suffixIfPotentialMatch_partialPrefix) { - SearchPath::Prefix prefix { .s = "fooX" }; +TEST(LookupPathElem, suffixIfPotentialMatch_partialPrefix) { + LookupPath::Prefix prefix { .s = "fooX" }; ASSERT_EQ(prefix.suffixIfPotentialMatch("foo"), std::nullopt); } -TEST(SearchPathElem, suffixIfPotentialMatch_exactPrefix) { - SearchPath::Prefix prefix { 
.s = "foo" }; +TEST(LookupPathElem, suffixIfPotentialMatch_exactPrefix) { + LookupPath::Prefix prefix { .s = "foo" }; ASSERT_EQ(prefix.suffixIfPotentialMatch("foo"), std::optional { "" }); } -TEST(SearchPathElem, suffixIfPotentialMatch_multiKey) { - SearchPath::Prefix prefix { .s = "foo/bar" }; +TEST(LookupPathElem, suffixIfPotentialMatch_multiKey) { + LookupPath::Prefix prefix { .s = "foo/bar" }; ASSERT_EQ(prefix.suffixIfPotentialMatch("foo/bar/baz"), std::optional { "baz" }); } -TEST(SearchPathElem, suffixIfPotentialMatch_trailingSlash) { - SearchPath::Prefix prefix { .s = "foo" }; +TEST(LookupPathElem, suffixIfPotentialMatch_trailingSlash) { + LookupPath::Prefix prefix { .s = "foo" }; ASSERT_EQ(prefix.suffixIfPotentialMatch("foo/"), std::optional { "" }); } -TEST(SearchPathElem, suffixIfPotentialMatch_trailingDoubleSlash) { - SearchPath::Prefix prefix { .s = "foo" }; +TEST(LookupPathElem, suffixIfPotentialMatch_trailingDoubleSlash) { + LookupPath::Prefix prefix { .s = "foo" }; ASSERT_EQ(prefix.suffixIfPotentialMatch("foo//"), std::optional { "/" }); } -TEST(SearchPathElem, suffixIfPotentialMatch_trailingPath) { - SearchPath::Prefix prefix { .s = "foo" }; +TEST(LookupPathElem, suffixIfPotentialMatch_trailingPath) { + LookupPath::Prefix prefix { .s = "foo" }; ASSERT_EQ(prefix.suffixIfPotentialMatch("foo/bar/baz"), std::optional { "bar/baz" }); } diff --git a/tests/unit/libexpr/value/value.cc b/tests/unit/libexpr/value/value.cc new file mode 100644 index 000000000000..5762d5891f8b --- /dev/null +++ b/tests/unit/libexpr/value/value.cc @@ -0,0 +1,25 @@ +#include "value.hh" + +#include "tests/libstore.hh" + +namespace nix { + +class ValueTest : public LibStoreTest +{}; + +TEST_F(ValueTest, unsetValue) +{ + Value unsetValue; + ASSERT_EQ(false, unsetValue.isValid()); + ASSERT_EQ(nThunk, unsetValue.type(true)); + ASSERT_DEATH(unsetValue.type(), ""); +} + +TEST_F(ValueTest, vInt) +{ + Value vInt; + vInt.mkInt(42); + ASSERT_EQ(true, vInt.isValid()); +} + +} // namespace nix diff --git a/tests/unit/libfetchers/data/public-key/defaultType.json b/tests/unit/libfetchers/data/public-key/defaultType.json new file mode 100644 index 000000000000..43f02a420d56 --- /dev/null +++ b/tests/unit/libfetchers/data/public-key/defaultType.json @@ -0,0 +1,4 @@ +{ + "key": "ABCDE", + "type": "ssh-ed25519" +} diff --git a/tests/unit/libfetchers/data/public-key/noRoundTrip.json b/tests/unit/libfetchers/data/public-key/noRoundTrip.json new file mode 100644 index 000000000000..4dcbf9148422 --- /dev/null +++ b/tests/unit/libfetchers/data/public-key/noRoundTrip.json @@ -0,0 +1,3 @@ +{ + "key": "ABCDE" +} diff --git a/tests/unit/libfetchers/data/public-key/simple.json b/tests/unit/libfetchers/data/public-key/simple.json new file mode 100644 index 000000000000..f83b927ac57b --- /dev/null +++ b/tests/unit/libfetchers/data/public-key/simple.json @@ -0,0 +1,4 @@ +{ + "key": "ABCDE", + "type": "ssh-rsa" +} diff --git a/tests/unit/libfetchers/public-key.cc b/tests/unit/libfetchers/public-key.cc index fcd5c3af0bd0..8a639da9f6aa 100644 --- a/tests/unit/libfetchers/public-key.cc +++ b/tests/unit/libfetchers/public-key.cc @@ -1,18 +1,54 @@ #include #include "fetchers.hh" #include "json-utils.hh" +#include +#include "tests/characterization.hh" namespace nix { - TEST(PublicKey, jsonSerialization) { - auto json = nlohmann::json(fetchers::PublicKey { .key = "ABCDE" }); - ASSERT_EQ(json, R"({ "key": "ABCDE", "type": "ssh-ed25519" })"_json); +using nlohmann::json; + +class PublicKeyTest : public CharacterizationTest +{ + Path unitTestData 
= getUnitTestData() + "/public-key"; + +public: + Path goldenMaster(std::string_view testStem) const override { + return unitTestData + "/" + testStem; } - TEST(PublicKey, jsonDeserialization) { - auto pubKeyJson = R"({ "key": "ABCDE", "type": "ssh-ed25519" })"_json; - fetchers::PublicKey pubKey = pubKeyJson; +}; - ASSERT_EQ(pubKey.key, "ABCDE"); - ASSERT_EQ(pubKey.type, "ssh-ed25519"); +#define TEST_JSON(FIXTURE, NAME, VAL) \ + TEST_F(FIXTURE, PublicKey_ ## NAME ## _from_json) { \ + readTest(#NAME ".json", [&](const auto & encoded_) { \ + fetchers::PublicKey expected { VAL }; \ + fetchers::PublicKey got = nlohmann::json::parse(encoded_); \ + ASSERT_EQ(got, expected); \ + }); \ + } \ + \ + TEST_F(FIXTURE, PublicKey_ ## NAME ## _to_json) { \ + writeTest(#NAME ".json", [&]() -> json { \ + return nlohmann::json(fetchers::PublicKey { VAL }); \ + }, [](const auto & file) { \ + return json::parse(readFile(file)); \ + }, [](const auto & file, const auto & got) { \ + return writeFile(file, got.dump(2) + "\n"); \ + }); \ } + +TEST_JSON(PublicKeyTest, simple, (fetchers::PublicKey { .type = "ssh-rsa", .key = "ABCDE" })) + +TEST_JSON(PublicKeyTest, defaultType, fetchers::PublicKey { .key = "ABCDE" }) + +#undef TEST_JSON + +TEST_F(PublicKeyTest, PublicKey_noRoundTrip_from_json) { + readTest("noRoundTrip.json", [&](const auto & encoded_) { + fetchers::PublicKey expected = { .type = "ssh-ed25519", .key = "ABCDE" }; + fetchers::PublicKey got = nlohmann::json::parse(encoded_); + ASSERT_EQ(got, expected); + }); +} + } diff --git a/tests/unit/libstore-support/tests/libstore.hh b/tests/unit/libstore-support/tests/libstore.hh index 78b162b95687..267188224877 100644 --- a/tests/unit/libstore-support/tests/libstore.hh +++ b/tests/unit/libstore-support/tests/libstore.hh @@ -11,7 +11,7 @@ namespace nix { class LibStoreTest : public virtual ::testing::Test { public: static void SetUpTestSuite() { - initLibStore(); + initLibStore(false); } protected: diff --git a/tests/unit/libutil-support/tests/nix_api_util.hh b/tests/unit/libutil-support/tests/nix_api_util.hh index 75d302bd6d8c..efd2001167de 100644 --- a/tests/unit/libutil-support/tests/nix_api_util.hh +++ b/tests/unit/libutil-support/tests/nix_api_util.hh @@ -24,7 +24,9 @@ protected: nix_c_context * ctx; - inline void assert_ctx_ok() { + inline void assert_ctx_ok() + { + if (nix_err_code(ctx) == NIX_OK) { return; } @@ -33,5 +35,14 @@ protected: std::string msg(p, n); FAIL() << "nix_err_code(ctx) != NIX_OK, message: " << msg; } + + inline void assert_ctx_err() + { + if (nix_err_code(ctx) != NIX_OK) { + return; + } + FAIL() << "Got NIX_OK, but expected an error!"; + } }; + } diff --git a/tests/unit/libutil/git.cc b/tests/unit/libutil/git.cc index 4f92488d68f0..ff934c117b8e 100644 --- a/tests/unit/libutil/git.cc +++ b/tests/unit/libutil/git.cc @@ -154,8 +154,8 @@ TEST_F(GitTest, tree_write) { TEST_F(GitTest, both_roundrip) { using File = MemorySourceAccessor::File; - MemorySourceAccessor files; - files.root = File::Directory { + auto files = make_ref(); + files->root = File::Directory { .contents { { "foo", @@ -189,12 +189,12 @@ TEST_F(GitTest, both_roundrip) { std::map cas; std::function dumpHook; - dumpHook = [&](const CanonPath & path) { + dumpHook = [&](const SourcePath & path) { StringSink s; HashSink hashSink { HashAlgorithm::SHA1 }; TeeSink s2 { s, hashSink }; auto mode = dump( - files, path, s2, dumpHook, + path, s2, dumpHook, defaultPathFilter, mockXpSettings); auto hash = hashSink.finish().first; cas.insert_or_assign(hash, std::move(s.s)); @@ -204,11 
+204,11 @@ TEST_F(GitTest, both_roundrip) { }; }; - auto root = dumpHook(CanonPath::root); + auto root = dumpHook({files}); - MemorySourceAccessor files2; + auto files2 = make_ref(); - MemorySink sinkFiles2 { files2 }; + MemorySink sinkFiles2 { *files2 }; std::function mkSinkHook; mkSinkHook = [&](auto prefix, auto & hash, auto blobMode) { @@ -229,7 +229,7 @@ TEST_F(GitTest, both_roundrip) { mkSinkHook("", root.hash, BlobMode::Regular); - ASSERT_EQ(files, files2); + ASSERT_EQ(*files, *files2); } TEST(GitLsRemote, parseSymrefLineWithReference) { diff --git a/tests/unit/libutil/json-utils.cc b/tests/unit/libutil/json-utils.cc index ec653fff529e..c9370a74bfda 100644 --- a/tests/unit/libutil/json-utils.cc +++ b/tests/unit/libutil/json-utils.cc @@ -169,7 +169,19 @@ TEST(optionalValueAt, existing) { TEST(optionalValueAt, empty) { auto json = R"({})"_json; - ASSERT_EQ(optionalValueAt(json, "string2"), std::nullopt); + ASSERT_EQ(optionalValueAt(json, "string"), std::nullopt); +} + +TEST(getNullable, null) { + auto json = R"(null)"_json; + + ASSERT_EQ(getNullable(json), std::nullopt); +} + +TEST(getNullable, empty) { + auto json = R"({})"_json; + + ASSERT_EQ(getNullable(json), std::optional { R"({})"_json }); } } /* namespace nix */
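The new json-utils tests at the end of this patch pin down a subtle distinction for the nullable-JSON helper they exercise: JSON `null` is treated as an absent value, while an empty object `{}` still counts as a present value. As a rough, hypothetical sketch only (the helper's actual implementation is not part of this patch excerpt), a function with the behaviour those two tests expect could look like this, using nlohmann::json:

#include <nlohmann/json.hpp>
#include <optional>

// Hypothetical sketch, not the getNullable implementation referenced by the patch:
// JSON `null` maps to std::nullopt, anything else (including `{}`) is passed through.
std::optional<nlohmann::json> getNullableSketch(const nlohmann::json & value)
{
    if (value.is_null())
        return std::nullopt;            // `null` means "absent"
    return std::optional { value };     // any other value, even `{}`, is kept
}

Under that sketch, parsing "null" yields std::nullopt and parsing "{}" yields an optional holding the empty object, matching the `getNullable, null` and `getNullable, empty` cases asserted above.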