Merge master into haskell-updates

Changed files
+2236 -1343
lib
maintainers
nixos
doc
manual
lib
make-options-doc
modules
misc
services
cluster
networking
web-apps
x11
display-managers
system
tests
pkgs
applications
audio
mopidy
editors
lapce
vscode
extensions
misc
networking
browsers
cluster
fluxcd
hadoop
qbec
spark
instant-messengers
cinny
mailreaders
himalaya
science
computer-architecture
qtrvsim
video
freetube
smplayer
data
fonts
borg-sans-mono
vazir-fonts
icons
kora-icon-theme
development
compilers
crystal
open-watcom
dotnet-modules
python-language-server
haskell-modules
interpreters
python
libraries
clearsilver
intel-gmmlib
libarchive-qt
libctl
libnbd
opendht
protolock
node-packages
python-modules
adafruit-platformdetect
androidtv
awscrt
azure-mgmt-monitor
bumps
cloudscraper
docx2txt
env-canada
google-cloud-redis
google-cloud-storage
intellifire4py
mdformat
meshtastic
nassl
neo4j-driver
proxmoxer
pyaussiebb
pycep-parser
pykrakenapi
pysigma
pysigma-backend-splunk
pysigma-pipeline-crowdstrike
pysigma-pipeline-sysmon
pytest-json-report
python-box
python-http-client
qcengine
readme_renderer
samsungtvws
soco
sslyze
tls-parser
twentemilieu
vt-py
tools
analysis
checkov
build-managers
bazel
bazel-remote
database
prisma-engines
doctl
electron
gomodifytags
jp
k6
kcli
rust
cargo-crev
stylua
web
nodejs
os-specific
servers
dns
coredns
http
apache-httpd
nextcloud
piping-server-rust
shells
tools
admin
berglas
fits-cloudctl
syft
trivy
archivers
backup
btrbk
misc
plantuml-server
up
networking
frp
hey
httpie
pirate-get
nix
security
cariddi
cfripper
exploitdb
feroxbuster
grype
sigma-cli
step-ca
system
syslog-ng
top-level
+62 -12
lib/modules.nix
···
catAttrs
concatLists
concatMap
-
count
+
concatStringsSep
elem
filter
findFirst
···
showOption
unknownModule
;
+
+
showDeclPrefix = loc: decl: prefix:
+
" - option(s) with prefix `${showOption (loc ++ [prefix])}' in module `${decl._file}'";
+
showRawDecls = loc: decls:
+
concatStringsSep "\n"
+
(sort (a: b: a < b)
+
(concatMap
+
(decl: map
+
(showDeclPrefix loc decl)
+
(attrNames decl.options)
+
)
+
decls
+
));
+
in
rec {
···
[{ inherit (module) file; inherit value; }]
) configs;
+
# Convert an option tree decl to a submodule option decl
+
optionTreeToOption = decl:
+
if isOption decl.options
+
then decl
+
else decl // {
+
options = mkOption {
+
type = types.submoduleWith {
+
modules = [ { options = decl.options; } ];
+
# `null` is not intended for use by modules. It is an internal
+
# value that means "whatever the user has declared elsewhere".
+
# This might become obsolete with https://github.com/NixOS/nixpkgs/issues/162398
+
shorthandOnlyDefinesConfig = null;
+
};
+
};
+
};
+
resultsByName = mapAttrs (name: decls:
# We're descending into attribute ‘name’.
let
loc = prefix ++ [name];
defns = defnsByName.${name} or [];
defns' = defnsByName'.${name} or [];
-
nrOptions = count (m: isOption m.options) decls;
+
optionDecls = filter (m: isOption m.options) decls;
in
-
if nrOptions == length decls then
+
if length optionDecls == length decls then
let opt = fixupOptionType loc (mergeOptionDecls loc decls);
in {
matchedOptions = evalOptionValue loc opt defns';
unmatchedDefns = [];
}
-
else if nrOptions != 0 then
-
let
-
firstOption = findFirst (m: isOption m.options) "" decls;
-
firstNonOption = findFirst (m: !isOption m.options) "" decls;
-
in
-
throw "The option `${showOption loc}' in `${firstOption._file}' is a prefix of options in `${firstNonOption._file}'."
+
else if optionDecls != [] then
+
if all (x: x.options.type.name == "submodule") optionDecls
+
# Raw options can only be merged into submodules. Merging into
+
# attrsets might be nice, but ambiguous. Suppose we have
+
# attrset as a `attrsOf submodule`. User declares option
+
# attrset.foo.bar, this could mean:
+
# a. option `bar` is only available in `attrset.foo`
+
# b. option `foo.bar` is available in all `attrset.*`
+
# c. reject and require "<name>" as a reminder that it behaves like (b).
+
# d. magically combine (a) and (c).
+
# All of the above are merely syntax sugar though.
+
then
+
let opt = fixupOptionType loc (mergeOptionDecls loc (map optionTreeToOption decls));
+
in {
+
matchedOptions = evalOptionValue loc opt defns';
+
unmatchedDefns = [];
+
}
+
else
+
let
+
firstNonOption = findFirst (m: !isOption m.options) "" decls;
+
nonOptions = filter (m: !isOption m.options) decls;
+
in
+
throw "The option `${showOption loc}' in module `${(lib.head optionDecls)._file}' would be a parent of the following options, but its type `${(lib.head optionDecls).options.type.description or "<no description>"}' does not support nested options.\n${
+
showRawDecls loc nonOptions
+
}"
else
mergeModules' loc decls defns) declsByName;
···
compare = a: b: (a.priority or 1000) < (b.priority or 1000);
in sort compare defs';
-
/* Hack for backward compatibility: convert options of type
-
optionSet to options of type submodule. FIXME: remove
-
eventually. */
fixupOptionType = loc: opt:
let
options = opt.options or
(throw "Option `${showOption loc}' has type optionSet but has no option attribute, in ${showFiles opt.declarations}.");
+
+
# Hack for backward compatibility: convert options of type
+
# optionSet to options of type submodule. FIXME: remove
+
# eventually.
f = tp:
if tp.name == "option set" || tp.name == "submodule" then
throw "The option ${showOption loc} uses submodules without a wrapping type, in ${showFiles opt.declarations}."
+1 -1
lib/options.nix
···
then true
else opt.visible or true;
readOnly = opt.readOnly or false;
-
type = opt.type.description or null;
+
type = opt.type.description or "unspecified";
}
// optionalAttrs (opt ? example) { example = scrubOptionValue opt.example; }
// optionalAttrs (opt ? default) { default = scrubOptionValue opt.default; }
+13
lib/tests/modules.sh
···
checkConfigOutput '^false$' config.enable ./declare-enable.nix
checkConfigError 'The option .* does not exist. Definition values:\n\s*- In .*: true' config.enable ./define-enable.nix
+
checkConfigOutput '^1$' config.bare-submodule.nested ./declare-bare-submodule.nix ./declare-bare-submodule-nested-option.nix
+
checkConfigOutput '^2$' config.bare-submodule.deep ./declare-bare-submodule.nix ./declare-bare-submodule-deep-option.nix
+
checkConfigOutput '^42$' config.bare-submodule.nested ./declare-bare-submodule.nix ./declare-bare-submodule-nested-option.nix ./declare-bare-submodule-deep-option.nix ./define-bare-submodule-values.nix
+
checkConfigOutput '^420$' config.bare-submodule.deep ./declare-bare-submodule.nix ./declare-bare-submodule-nested-option.nix ./declare-bare-submodule-deep-option.nix ./define-bare-submodule-values.nix
+
checkConfigOutput '^2$' config.bare-submodule.deep ./declare-bare-submodule.nix ./declare-bare-submodule-deep-option.nix ./define-shorthandOnlyDefinesConfig-true.nix
+
checkConfigError 'The option .bare-submodule.deep. in .*/declare-bare-submodule-deep-option.nix. is already declared in .*/declare-bare-submodule-deep-option-duplicate.nix' config.bare-submodule.deep ./declare-bare-submodule.nix ./declare-bare-submodule-deep-option.nix ./declare-bare-submodule-deep-option-duplicate.nix
+
# Check integer types.
# unsigned
checkConfigOutput '^42$' config.value ./declare-int-unsigned-value.nix ./define-value-int-positive.nix
···
checkConfigOutput "10" config.processedToplevel ./raw.nix
checkConfigError "The option .multiple. is defined multiple times" config.multiple ./raw.nix
checkConfigOutput "bar" config.priorities ./raw.nix
+
+
## Option collision
+
checkConfigError \
+
'The option .set. in module .*/declare-set.nix. would be a parent of the following options, but its type .attribute set of signed integers. does not support nested options.\n\s*- option[(]s[)] with prefix .set.enable. in module .*/declare-enable-nested.nix.' \
+
config.set \
+
./declare-set.nix ./declare-enable-nested.nix
# Test that types.optionType merges types correctly
checkConfigOutput '^10$' config.theOption.int ./optionTypeMerging.nix
+10
lib/tests/modules/declare-bare-submodule-deep-option-duplicate.nix
···
+
{ lib, ... }:
+
let
+
inherit (lib) mkOption types;
+
in
+
{
+
options.bare-submodule.deep = mkOption {
+
type = types.int;
+
default = 2;
+
};
+
}
+10
lib/tests/modules/declare-bare-submodule-deep-option.nix
···
+
{ lib, ... }:
+
let
+
inherit (lib) mkOption types;
+
in
+
{
+
options.bare-submodule.deep = mkOption {
+
type = types.int;
+
default = 2;
+
};
+
}
+19
lib/tests/modules/declare-bare-submodule-nested-option.nix
···
+
{ config, lib, ... }:
+
let
+
inherit (lib) mkOption types;
+
in
+
{
+
options.bare-submodule = mkOption {
+
type = types.submoduleWith {
+
shorthandOnlyDefinesConfig = config.shorthandOnlyDefinesConfig;
+
modules = [
+
{
+
options.nested = mkOption {
+
type = types.int;
+
default = 1;
+
};
+
}
+
];
+
};
+
};
+
}
+18
lib/tests/modules/declare-bare-submodule.nix
···
+
{ config, lib, ... }:
+
let
+
inherit (lib) mkOption types;
+
in
+
{
+
options.bare-submodule = mkOption {
+
type = types.submoduleWith {
+
modules = [ ];
+
shorthandOnlyDefinesConfig = config.shorthandOnlyDefinesConfig;
+
};
+
default = {};
+
};
+
+
# config-dependent options: won't recommend, but useful for making this test parameterized
+
options.shorthandOnlyDefinesConfig = mkOption {
+
default = false;
+
};
+
}
+12
lib/tests/modules/declare-set.nix
···
+
{ lib, ... }:
+
+
{
+
options.set = lib.mkOption {
+
default = { };
+
example = { a = 1; };
+
type = lib.types.attrsOf lib.types.int;
+
description = ''
+
Some descriptive text
+
'';
+
};
+
}
+4
lib/tests/modules/define-bare-submodule-values.nix
···
+
{
+
bare-submodule.nested = 42;
+
bare-submodule.deep = 420;
+
}
+1
lib/tests/modules/define-shorthandOnlyDefinesConfig-true.nix
···
+
{ shorthandOnlyDefinesConfig = true; }
+15 -7
lib/types.nix
···
let
inherit (lib.modules) evalModules;
-
coerce = unify: value: if isFunction value
-
then setFunctionArgs (args: unify (value args)) (functionArgs value)
-
else unify (if shorthandOnlyDefinesConfig then { config = value; } else value);
+
shorthandToModule = if shorthandOnlyDefinesConfig == false
+
then value: value
+
else value: { config = value; };
allModules = defs: imap1 (n: { value, file }:
-
if isAttrs value || isFunction value then
-
# Annotate the value with the location of its definition for better error messages
-
coerce (lib.modules.unifyModuleSyntax file "${toString file}-${toString n}") value
+
if isFunction value
+
then setFunctionArgs
+
(args: lib.modules.unifyModuleSyntax file "${toString file}-${toString n}" (value args))
+
(functionArgs value)
+
else if isAttrs value
+
then
+
lib.modules.unifyModuleSyntax file "${toString file}-${toString n}" (shorthandToModule value)
else value
) defs;
···
then lhs.specialArgs // rhs.specialArgs
else throw "A submoduleWith option is declared multiple times with the same specialArgs \"${toString (attrNames intersecting)}\"";
shorthandOnlyDefinesConfig =
-
if lhs.shorthandOnlyDefinesConfig == rhs.shorthandOnlyDefinesConfig
+
if lhs.shorthandOnlyDefinesConfig == null
+
then rhs.shorthandOnlyDefinesConfig
+
else if rhs.shorthandOnlyDefinesConfig == null
+
then lhs.shorthandOnlyDefinesConfig
+
else if lhs.shorthandOnlyDefinesConfig == rhs.shorthandOnlyDefinesConfig
then lhs.shorthandOnlyDefinesConfig
else throw "A submoduleWith option is declared multiple times with conflicting shorthandOnlyDefinesConfig values";
};
+6
maintainers/maintainer-list.nix
···
githubId = 82591;
name = "Carl Sverre";
};
+
carpinchomug = {
+
email = "aki.suda@protonmail.com";
+
github = "carpinchomug";
+
githubId = 101536256;
+
name = "Akiyoshi Suda";
+
};
cartr = {
email = "carter.sande@duodecima.technology";
github = "cartr";
+4 -3
nixos/doc/manual/development/option-declarations.section.md
···
`type`
-
: The type of the option (see [](#sec-option-types)). It may be
-
omitted, but that's not advisable since it may lead to errors that
-
are hard to diagnose.
+
: The type of the option (see [](#sec-option-types)). This
+
argument is mandatory for nixpkgs modules. Setting this is highly
+
recommended for the sake of documentation and type checking. In case it is
+
not set, a fallback type with unspecified behavior is used.
`default`
+5 -3
nixos/doc/manual/from_md/development/option-declarations.section.xml
···
<listitem>
<para>
The type of the option (see
-
<xref linkend="sec-option-types" />). It may be omitted, but
-
that’s not advisable since it may lead to errors that are hard
-
to diagnose.
+
<xref linkend="sec-option-types" />). This argument is
+
mandatory for nixpkgs modules. Setting this is highly
+
recommended for the sake of documentation and type checking.
+
In case it is not set, a fallback type with unspecified
+
behavior is used.
</para>
</listitem>
</varlistentry>
+132
nixos/doc/manual/from_md/release-notes/rl-2205.section.xml
···
</listitem>
<listitem>
<para>
+
Services in the <literal>hadoop</literal> module previously
+
set <literal>openFirewall</literal> to true by default. This
+
has now been changed to false. Node definitions for multi-node
+
clusters would need <literal>openFirewall = true;</literal> to
+
be added to hadoop services when upgrading from NixOS
+
21.11.
+
</para>
+
</listitem>
+
<listitem>
+
<para>
+
<literal>services.hadoop.yarn.nodemanager</literal> now uses
+
cgroup-based CPU limit enforcement by default. Additionally,
+
the option <literal>useCGroups</literal> was added to
+
nodemanagers as an easy way to switch back to the old
+
behavior.
+
</para>
+
</listitem>
+
<listitem>
+
<para>
The <literal>wafHook</literal> hook now honors
<literal>NIX_BUILD_CORES</literal> when
<literal>enableParallelBuilding</literal> is not set
···
</listitem>
<listitem>
<para>
+
Some improvements have been made to the
+
<literal>hadoop</literal> module:
+
</para>
+
<itemizedlist spacing="compact">
+
<listitem>
+
<para>
+
A <literal>gatewayRole</literal> option has been added,
+
for deploying hadoop cluster configuration files to a node
+
that does not have any active services
+
</para>
+
</listitem>
+
<listitem>
+
<para>
+
Support for older versions of hadoop has been added to
+
the module
+
</para>
+
</listitem>
+
<listitem>
+
<para>
+
Overriding and extending site XML files has been made
+
easier
+
</para>
+
</listitem>
+
</itemizedlist>
+
</listitem>
+
<listitem>
+
<para>
If you are using Wayland you can choose to use the Ozone
Wayland support in Chrome and several Electron apps by setting
the environment variable <literal>NIXOS_OZONE_WL=1</literal>
···
</listitem>
<listitem>
<para>
+
ORY Kratos was updated to version 0.8.3-alpha.1.pre.0, which
+
introduces some breaking changes:
+
</para>
+
<itemizedlist spacing="compact">
+
<listitem>
+
<para>
+
If you are relying on the SQLite images, update your
+
Docker Pull commands as follows:
+
</para>
+
<itemizedlist spacing="compact">
+
<listitem>
+
<para>
+
<literal>docker pull oryd/kratos:{version}</literal>
+
</para>
+
</listitem>
+
</itemizedlist>
+
</listitem>
+
<listitem>
+
<para>
+
Additionally, all passwords now have to be at least 8
+
characters long.
+
</para>
+
</listitem>
+
<listitem>
+
<para>
+
For more details, see:
+
</para>
+
<itemizedlist spacing="compact">
+
<listitem>
+
<para>
+
<link xlink:href="https://github.com/ory/kratos/releases/tag/v0.8.1-alpha.1">Release
+
Notes for v0.8.1-alpha-1</link>
+
</para>
+
</listitem>
+
<listitem>
+
<para>
+
<link xlink:href="https://github.com/ory/kratos/releases/tag/v0.8.2-alpha.1">Release
+
Notes for v0.8.2-alpha-1</link>
+
</para>
+
</listitem>
+
</itemizedlist>
+
</listitem>
+
</itemizedlist>
+
</listitem>
+
<listitem>
+
<para>
<literal>fetchFromSourcehut</literal> now allows fetching
repositories recursively using <literal>fetchgit</literal> or
<literal>fetchhg</literal> if the argument
···
default. It will automatically be enabled through services and
desktop environments as needed.
</para>
+
</listitem>
+
<listitem>
+
<para>
+
The <literal>hadoop</literal> package has added support for
+
<literal>aarch64-linux</literal> and
+
<literal>aarch64-darwin</literal> as of 3.3.1
+
(<link xlink:href="https://github.com/NixOS/nixpkgs/pull/158613">#158613</link>).
+
</para>
+
</listitem>
+
<listitem>
+
<para>
+
The <literal>R</literal> package now builds again on
+
<literal>aarch64-darwin</literal>
+
(<link xlink:href="https://github.com/NixOS/nixpkgs/pull/158992">#158992</link>).
+
</para>
+
</listitem>
+
<listitem>
+
<para>
+
The <literal>spark3</literal> package has been updated from
+
3.1.2 to 3.2.1
+
(<link xlink:href="https://github.com/NixOS/nixpkgs/pull/160075">#160075</link>):
+
</para>
+
<itemizedlist spacing="compact">
+
<listitem>
+
<para>
+
Testing has been enabled for
+
<literal>aarch64-linux</literal> in addition to
+
<literal>x86_64-linux</literal>.
+
</para>
+
</listitem>
+
<listitem>
+
<para>
+
The <literal>spark3</literal> package is now usable on
+
<literal>aarch64-darwin</literal> as a result of
+
<link xlink:href="https://github.com/NixOS/nixpkgs/pull/158613">#158613</link>
+
and
+
<link xlink:href="https://github.com/NixOS/nixpkgs/pull/158992">#158992</link>.
+
</para>
+
</listitem>
+
</itemizedlist>
</listitem>
</itemizedlist>
</section>
+30
nixos/doc/manual/release-notes/rl-2205.section.md
···
- The MoinMoin wiki engine (`services.moinmoin`) has been removed, because Python 2 is being retired from nixpkgs.
+
- Services in the `hadoop` module previously set `openFirewall` to true by default.
+
This has now been changed to false. Node definitions for multi-node clusters would need
+
`openFirewall = true;` to be added to hadoop services when upgrading from NixOS 21.11.
+
+
- `services.hadoop.yarn.nodemanager` now uses cgroup-based CPU limit enforcement by default.
+
Additionally, the option `useCGroups` was added to nodemanagers as an easy way to switch
+
back to the old behavior.
+
- The `wafHook` hook now honors `NIX_BUILD_CORES` when `enableParallelBuilding` is not set explicitly. Packages can restore the old behaviour by setting `enableParallelBuilding=false`.
- `pkgs.claws-mail-gtk2`, representing Claws Mail's older release version three, was removed in order to get rid of Python 2.
···
- The `writers.writePyPy2`/`writers.writePyPy3` and corresponding `writers.writePyPy2Bin`/`writers.writePyPy3Bin` convenience functions to create executable Python 2/3 scripts using the PyPy interpreter were added.
+
- Some improvements have been made to the `hadoop` module:
+
- A `gatewayRole` option has been added, for deploying hadoop cluster configuration files to a node that does not have any active services
+
- Support for older versions of hadoop has been added to the module
+
- Overriding and extending site XML files has been made easier
+
- If you are using Wayland you can choose to use the Ozone Wayland support
in Chrome and several Electron apps by setting the environment variable
`NIXOS_OZONE_WL=1` (for example via
···
- `nixos-generate-config` now puts the dhcp configuration in `hardware-configuration.nix` instead of `configuration.nix`.
+
- ORY Kratos was updated to version 0.8.3-alpha.1.pre.0, which introduces some breaking changes:
+
- If you are relying on the SQLite images, update your Docker Pull commands as follows:
+
- `docker pull oryd/kratos:{version}`
+
- Additionally, all passwords now have to be at least 8 characters long.
+
- For more details, see:
+
- [Release Notes for v0.8.1-alpha-1](https://github.com/ory/kratos/releases/tag/v0.8.1-alpha.1)
+
- [Release Notes for v0.8.2-alpha-1](https://github.com/ory/kratos/releases/tag/v0.8.2-alpha.1)
+
- `fetchFromSourcehut` now allows fetching repositories recursively
using `fetchgit` or `fetchhg` if the argument `fetchSubmodules`
is set to `true`.
···
- `programs.tmux` has a new option `plugins` that accepts a list of packages from the `tmuxPlugins` group. The specified packages are added to the system and loaded by `tmux`.
- The polkit service, available at `security.polkit.enable`, is now disabled by default. It will automatically be enabled through services and desktop environments as needed.
+
+
- The `hadoop` package has added support for `aarch64-linux` and `aarch64-darwin` as of 3.3.1 ([#158613](https://github.com/NixOS/nixpkgs/pull/158613)).
+
+
- The `R` package now builds again on `aarch64-darwin` ([#158992](https://github.com/NixOS/nixpkgs/pull/158992)).
+
+
- The `spark3` package has been updated from 3.1.2 to 3.2.1 ([#160075](https://github.com/NixOS/nixpkgs/pull/160075)):
+
+
- Testing has been enabled for `aarch64-linux` in addition to `x86_64-linux`.
+
- The `spark3` package is now usable on `aarch64-darwin` as a result of [#158613](https://github.com/NixOS/nixpkgs/pull/158613) and [#158992](https://github.com/NixOS/nixpkgs/pull/158992).
<!-- To avoid merge conflicts, consider adding your item at an arbitrary place in the list instead. -->
+8 -1
nixos/lib/make-options-doc/mergeJSON.py
···
elif ov is not None or cur.get(ok, None) is None:
cur[ok] = ov
+
severity = "error" if warningsAreErrors else "warning"
+
# check that every option has a description
hasWarnings = False
for (k, v) in options.items():
if v.value.get('description', None) is None:
-
severity = "error" if warningsAreErrors else "warning"
hasWarnings = True
print(f"\x1b[1;31m{severity}: option {v.name} has no description\x1b[0m", file=sys.stderr)
v.value['description'] = "This option has no description."
+
if v.value.get('type', "unspecified") == "unspecified":
+
hasWarnings = True
+
print(
+
f"\x1b[1;31m{severity}: option {v.name} has no type. Please specify a valid type, see " +
+
"https://nixos.org/manual/nixos/stable/index.html#sec-option-types\x1b[0m", file=sys.stderr)
+
if hasWarnings and warningsAreErrors:
print(
"\x1b[1;31m" +
+202
nixos/lib/systemd-lib.nix
···
let
cfg = config.systemd;
lndir = "${pkgs.buildPackages.xorg.lndir}/bin/lndir";
+
systemd = cfg.package;
in rec {
shellEscape = s: (replaceChars [ "\\" ] [ "\\\\" ] s);
···
''}
''; # */
+
makeJobScript = name: text:
+
let
+
scriptName = replaceChars [ "\\" "@" ] [ "-" "_" ] (shellEscape name);
+
out = (pkgs.writeShellScriptBin scriptName ''
+
set -e
+
${text}
+
'').overrideAttrs (_: {
+
# The derivation name is different from the script file name
+
# to keep the script file name short to avoid cluttering logs.
+
name = "unit-script-${scriptName}";
+
});
+
in "${out}/bin/${scriptName}";
+
+
unitConfig = { config, options, ... }: {
+
config = {
+
unitConfig =
+
optionalAttrs (config.requires != [])
+
{ Requires = toString config.requires; }
+
// optionalAttrs (config.wants != [])
+
{ Wants = toString config.wants; }
+
// optionalAttrs (config.after != [])
+
{ After = toString config.after; }
+
// optionalAttrs (config.before != [])
+
{ Before = toString config.before; }
+
// optionalAttrs (config.bindsTo != [])
+
{ BindsTo = toString config.bindsTo; }
+
// optionalAttrs (config.partOf != [])
+
{ PartOf = toString config.partOf; }
+
// optionalAttrs (config.conflicts != [])
+
{ Conflicts = toString config.conflicts; }
+
// optionalAttrs (config.requisite != [])
+
{ Requisite = toString config.requisite; }
+
// optionalAttrs (config.restartTriggers != [])
+
{ X-Restart-Triggers = toString config.restartTriggers; }
+
// optionalAttrs (config.reloadTriggers != [])
+
{ X-Reload-Triggers = toString config.reloadTriggers; }
+
// optionalAttrs (config.description != "") {
+
Description = config.description; }
+
// optionalAttrs (config.documentation != []) {
+
Documentation = toString config.documentation; }
+
// optionalAttrs (config.onFailure != []) {
+
OnFailure = toString config.onFailure; }
+
// optionalAttrs (options.startLimitIntervalSec.isDefined) {
+
StartLimitIntervalSec = toString config.startLimitIntervalSec;
+
} // optionalAttrs (options.startLimitBurst.isDefined) {
+
StartLimitBurst = toString config.startLimitBurst;
+
};
+
};
+
};
+
+
serviceConfig = { name, config, ... }: {
+
config = mkMerge
+
[ { # Default path for systemd services. Should be quite minimal.
+
path = mkAfter
+
[ pkgs.coreutils
+
pkgs.findutils
+
pkgs.gnugrep
+
pkgs.gnused
+
systemd
+
];
+
environment.PATH = "${makeBinPath config.path}:${makeSearchPathOutput "bin" "sbin" config.path}";
+
}
+
(mkIf (config.preStart != "")
+
{ serviceConfig.ExecStartPre =
+
[ (makeJobScript "${name}-pre-start" config.preStart) ];
+
})
+
(mkIf (config.script != "")
+
{ serviceConfig.ExecStart =
+
makeJobScript "${name}-start" config.script + " " + config.scriptArgs;
+
})
+
(mkIf (config.postStart != "")
+
{ serviceConfig.ExecStartPost =
+
[ (makeJobScript "${name}-post-start" config.postStart) ];
+
})
+
(mkIf (config.reload != "")
+
{ serviceConfig.ExecReload =
+
makeJobScript "${name}-reload" config.reload;
+
})
+
(mkIf (config.preStop != "")
+
{ serviceConfig.ExecStop =
+
makeJobScript "${name}-pre-stop" config.preStop;
+
})
+
(mkIf (config.postStop != "")
+
{ serviceConfig.ExecStopPost =
+
makeJobScript "${name}-post-stop" config.postStop;
+
})
+
];
+
};
+
+
mountConfig = { config, ... }: {
+
config = {
+
mountConfig =
+
{ What = config.what;
+
Where = config.where;
+
} // optionalAttrs (config.type != "") {
+
Type = config.type;
+
} // optionalAttrs (config.options != "") {
+
Options = config.options;
+
};
+
};
+
};
+
+
automountConfig = { config, ... }: {
+
config = {
+
automountConfig =
+
{ Where = config.where;
+
};
+
};
+
};
+
+
commonUnitText = def: ''
+
[Unit]
+
${attrsToSection def.unitConfig}
+
'';
+
+
targetToUnit = name: def:
+
{ inherit (def) aliases wantedBy requiredBy enable;
+
text =
+
''
+
[Unit]
+
${attrsToSection def.unitConfig}
+
'';
+
};
+
+
serviceToUnit = name: def:
+
{ inherit (def) aliases wantedBy requiredBy enable;
+
text = commonUnitText def +
+
''
+
[Service]
+
${let env = cfg.globalEnvironment // def.environment;
+
in concatMapStrings (n:
+
let s = optionalString (env.${n} != null)
+
"Environment=${builtins.toJSON "${n}=${env.${n}}"}\n";
+
# systemd max line length is now 1MiB
+
# https://github.com/systemd/systemd/commit/e6dde451a51dc5aaa7f4d98d39b8fe735f73d2af
+
in if stringLength s >= 1048576 then throw "The value of the environment variable ‘${n}’ in systemd service ‘${name}.service’ is too long." else s) (attrNames env)}
+
${if def.reloadIfChanged then ''
+
X-ReloadIfChanged=true
+
'' else if !def.restartIfChanged then ''
+
X-RestartIfChanged=false
+
'' else ""}
+
${optionalString (!def.stopIfChanged) "X-StopIfChanged=false"}
+
${attrsToSection def.serviceConfig}
+
'';
+
};
+
+
socketToUnit = name: def:
+
{ inherit (def) aliases wantedBy requiredBy enable;
+
text = commonUnitText def +
+
''
+
[Socket]
+
${attrsToSection def.socketConfig}
+
${concatStringsSep "\n" (map (s: "ListenStream=${s}") def.listenStreams)}
+
${concatStringsSep "\n" (map (s: "ListenDatagram=${s}") def.listenDatagrams)}
+
'';
+
};
+
+
timerToUnit = name: def:
+
{ inherit (def) aliases wantedBy requiredBy enable;
+
text = commonUnitText def +
+
''
+
[Timer]
+
${attrsToSection def.timerConfig}
+
'';
+
};
+
+
pathToUnit = name: def:
+
{ inherit (def) aliases wantedBy requiredBy enable;
+
text = commonUnitText def +
+
''
+
[Path]
+
${attrsToSection def.pathConfig}
+
'';
+
};
+
+
mountToUnit = name: def:
+
{ inherit (def) aliases wantedBy requiredBy enable;
+
text = commonUnitText def +
+
''
+
[Mount]
+
${attrsToSection def.mountConfig}
+
'';
+
};
+
+
automountToUnit = name: def:
+
{ inherit (def) aliases wantedBy requiredBy enable;
+
text = commonUnitText def +
+
''
+
[Automount]
+
${attrsToSection def.automountConfig}
+
'';
+
};
+
+
sliceToUnit = name: def:
+
{ inherit (def) aliases wantedBy requiredBy enable;
+
text = commonUnitText def +
+
''
+
[Slice]
+
${attrsToSection def.sliceConfig}
+
'';
+
};
}
+5 -1
nixos/modules/misc/locate.nix
···
pruneNames = mkOption {
type = listOf str;
-
default = [ ".bzr" ".cache" ".git" ".hg" ".svn" ];
+
default = lib.optionals (!isFindutils) [ ".bzr" ".cache" ".git" ".hg" ".svn" ];
+
defaultText = literalDocBook ''
+
<literal>[ ".bzr" ".cache" ".git" ".hg" ".svn" ]</literal>, if
+
supported by the locate implementation (i.e. mlocate or plocate).
+
'';
description = ''
Directory components which should exclude paths containing them from indexing
'';
+5 -1
nixos/modules/misc/version.nix
···
concatStringsSep mapAttrsToList toLower
literalExpression mkRenamedOptionModule mkDefault mkOption trivial types;
+
needsEscaping = s: null != builtins.match "[a-zA-Z0-9]+" s;
+
escapeIfNeccessary = s: if needsEscaping s then s else ''"${lib.escape [ "\$" "\"" "\\" "\`" ] s}"'';
attrsToText = attrs:
-
concatStringsSep "\n" (mapAttrsToList (n: v: ''${n}="${toString v}"'') attrs);
+
concatStringsSep "\n" (
+
mapAttrsToList (n: v: ''${n}=${escapeIfNeccessary (toString v)}'') attrs
+
);
in
{
+11 -11
nixos/modules/services/cluster/hadoop/conf.nix
···
{ cfg, pkgs, lib }:
let
-
propertyXml = name: value: ''
+
propertyXml = name: value: lib.optionalString (value != null) ''
<property>
<name>${name}</name>
<value>${builtins.toString value}</value>
···
export HADOOP_LOG_DIR=/tmp/hadoop/$USER
'';
in
-
pkgs.runCommand "hadoop-conf" {} ''
+
pkgs.runCommand "hadoop-conf" {} (with cfg; ''
mkdir -p $out/
-
cp ${siteXml "core-site.xml" cfg.coreSite}/* $out/
-
cp ${siteXml "hdfs-site.xml" cfg.hdfsSite}/* $out/
-
cp ${siteXml "mapred-site.xml" cfg.mapredSite}/* $out/
-
cp ${siteXml "yarn-site.xml" cfg.yarnSite}/* $out/
-
cp ${siteXml "httpfs-site.xml" cfg.httpfsSite}/* $out/
-
cp ${cfgFile "container-executor.cfg" cfg.containerExecutorCfg}/* $out/
+
cp ${siteXml "core-site.xml" (coreSite // coreSiteInternal)}/* $out/
+
cp ${siteXml "hdfs-site.xml" (hdfsSiteDefault // hdfsSite // hdfsSiteInternal)}/* $out/
+
cp ${siteXml "mapred-site.xml" (mapredSiteDefault // mapredSite)}/* $out/
+
cp ${siteXml "yarn-site.xml" (yarnSiteDefault // yarnSite // yarnSiteInternal)}/* $out/
+
cp ${siteXml "httpfs-site.xml" httpfsSite}/* $out/
+
cp ${cfgFile "container-executor.cfg" containerExecutorCfg}/* $out/
cp ${pkgs.writeTextDir "hadoop-user-functions.sh" userFunctions}/* $out/
cp ${pkgs.writeTextDir "hadoop-env.sh" hadoopEnv}/* $out/
-
cp ${cfg.log4jProperties} $out/log4j.properties
-
${lib.concatMapStringsSep "\n" (dir: "cp -r ${dir}/* $out/") cfg.extraConfDirs}
-
''
+
cp ${log4jProperties} $out/log4j.properties
+
${lib.concatMapStringsSep "\n" (dir: "cp -r ${dir}/* $out/") extraConfDirs}
+
'')
+72 -25
nixos/modules/services/cluster/hadoop/default.nix
···
<link xlink:href="https://hadoop.apache.org/docs/current/hadoop-project-dist/hadoop-common/core-default.xml"/>
'';
};
+
coreSiteInternal = mkOption {
+
default = {};
+
type = types.attrsOf types.anything;
+
internal = true;
+
description = ''
+
Internal option to add configs to core-site.xml based on module options
+
'';
+
};
-
hdfsSite = mkOption {
+
hdfsSiteDefault = mkOption {
default = {
"dfs.namenode.rpc-bind-host" = "0.0.0.0";
+
"dfs.namenode.http-address" = "0.0.0.0:9870";
+
"dfs.namenode.servicerpc-bind-host" = "0.0.0.0";
+
"dfs.namenode.http-bind-host" = "0.0.0.0";
};
type = types.attrsOf types.anything;
+
description = ''
+
Default options for hdfs-site.xml
+
'';
+
};
+
hdfsSite = mkOption {
+
default = {};
+
type = types.attrsOf types.anything;
example = literalExpression ''
{
"dfs.nameservices" = "namenode1";
}
'';
description = ''
-
Hadoop hdfs-site.xml definition
+
Additional options and overrides for hdfs-site.xml
<link xlink:href="https://hadoop.apache.org/docs/current/hadoop-project-dist/hadoop-hdfs/hdfs-default.xml"/>
'';
};
+
hdfsSiteInternal = mkOption {
+
default = {};
+
type = types.attrsOf types.anything;
+
internal = true;
+
description = ''
+
Internal option to add configs to hdfs-site.xml based on module options
+
'';
+
};
-
mapredSite = mkOption {
+
mapredSiteDefault = mkOption {
default = {
"mapreduce.framework.name" = "yarn";
"yarn.app.mapreduce.am.env" = "HADOOP_MAPRED_HOME=${cfg.package}/lib/${cfg.package.untarDir}";
···
}
'';
type = types.attrsOf types.anything;
+
description = ''
+
Default options for mapred-site.xml
+
'';
+
};
+
mapredSite = mkOption {
+
default = {};
+
type = types.attrsOf types.anything;
example = literalExpression ''
-
options.services.hadoop.mapredSite.default // {
+
{
"mapreduce.map.java.opts" = "-Xmx900m -XX:+UseParallelGC";
}
'';
description = ''
-
Hadoop mapred-site.xml definition
+
Additional options and overrides for mapred-site.xml
<link xlink:href="https://hadoop.apache.org/docs/current/hadoop-mapreduce-client/hadoop-mapreduce-client-core/mapred-default.xml"/>
'';
};
-
yarnSite = mkOption {
+
yarnSiteDefault = mkOption {
default = {
"yarn.nodemanager.admin-env" = "PATH=$PATH";
"yarn.nodemanager.aux-services" = "mapreduce_shuffle";
···
"yarn.nodemanager.linux-container-executor.path" = "/run/wrappers/yarn-nodemanager/bin/container-executor";
"yarn.nodemanager.log-dirs" = "/var/log/hadoop/yarn/nodemanager";
"yarn.resourcemanager.bind-host" = "0.0.0.0";
-
"yarn.resourcemanager.scheduler.class" = "org.apache.hadoop.yarn.server.resourcemanager.scheduler.fifo.FifoScheduler";
+
"yarn.resourcemanager.scheduler.class" = "org.apache.hadoop.yarn.server.resourcemanager.scheduler.fair.FairScheduler";
};
type = types.attrsOf types.anything;
+
description = ''
+
Default options for yarn-site.xml
+
'';
+
};
+
yarnSite = mkOption {
+
default = {};
+
type = types.attrsOf types.anything;
example = literalExpression ''
-
options.services.hadoop.yarnSite.default // {
+
{
"yarn.resourcemanager.hostname" = "''${config.networking.hostName}";
}
'';
description = ''
-
Hadoop yarn-site.xml definition
+
Additional options and overrides for yarn-site.xml
<link xlink:href="https://hadoop.apache.org/docs/current/hadoop-yarn/hadoop-yarn-common/yarn-default.xml"/>
'';
};
+
yarnSiteInternal = mkOption {
+
default = {};
+
type = types.attrsOf types.anything;
+
internal = true;
+
description = ''
+
Internal option to add configs to yarn-site.xml based on module options
+
'';
+
};
httpfsSite = mkOption {
default = { };
···
"yarn.nodemanager.linux-container-executor.group"="hadoop";
"min.user.id"=1000;
"feature.terminal.enabled"=1;
+
"feature.mount-cgroup.enabled" = 1;
};
type = types.attrsOf types.anything;
example = literalExpression ''
···
description = "Directories containing additional config files to be added to HADOOP_CONF_DIR";
};
+
gatewayRole.enable = mkEnableOption "gateway role for deploying hadoop configs";
+
package = mkOption {
type = types.package;
default = pkgs.hadoop;
···
};
-
config = mkMerge [
-
(mkIf (builtins.hasAttr "yarn" config.users.users ||
-
builtins.hasAttr "hdfs" config.users.users ||
-
builtins.hasAttr "httpfs" config.users.users) {
-
users.groups.hadoop = {
-
gid = config.ids.gids.hadoop;
-
};
-
environment = {
-
systemPackages = [ cfg.package ];
-
etc."hadoop-conf".source = let
-
hadoopConf = "${import ./conf.nix { inherit cfg pkgs lib; }}/";
-
in "${hadoopConf}";
-
};
-
})
-
-
];
+
config = mkIf cfg.gatewayRole.enable {
+
users.groups.hadoop = {
+
gid = config.ids.gids.hadoop;
+
};
+
environment = {
+
systemPackages = [ cfg.package ];
+
etc."hadoop-conf".source = let
+
hadoopConf = "${import ./conf.nix { inherit cfg pkgs lib; }}/";
+
in "${hadoopConf}";
+
variables.HADOOP_CONF_DIR = "/etc/hadoop-conf/";
+
};
+
};
}
+147 -146
nixos/modules/services/cluster/hadoop/hdfs.nix
···
-
{ config, lib, pkgs, ...}:
+
{ config, lib, pkgs, ... }:
with lib;
let
cfg = config.services.hadoop;
+
+
# Config files for hadoop services
hadoopConf = "${import ./conf.nix { inherit cfg pkgs lib; }}/";
-
restartIfChanged = mkOption {
-
type = types.bool;
-
description = ''
-
Automatically restart the service on config change.
-
This can be set to false to defer restarts on clusters running critical applications.
-
Please consider the security implications of inadvertently running an older version,
-
and the possibility of unexpected behavior caused by inconsistent versions across a cluster when disabling this option.
-
'';
-
default = false;
-
};
+
+
# Generator for HDFS service options
+
hadoopServiceOption = { serviceName, firewallOption ? true, extraOpts ? null }: {
+
enable = mkEnableOption serviceName;
+
restartIfChanged = mkOption {
+
type = types.bool;
+
description = ''
+
Automatically restart the service on config change.
+
This can be set to false to defer restarts on clusters running critical applications.
+
Please consider the security implications of inadvertently running an older version,
+
and the possibility of unexpected behavior caused by inconsistent versions across a cluster when disabling this option.
+
'';
+
default = false;
+
};
+
extraFlags = mkOption{
+
type = with types; listOf str;
+
default = [];
+
description = "Extra command line flags to pass to ${serviceName}";
+
example = [
+
"-Dcom.sun.management.jmxremote"
+
"-Dcom.sun.management.jmxremote.port=8010"
+
];
+
};
+
extraEnv = mkOption{
+
type = with types; attrsOf str;
+
default = {};
+
description = "Extra environment variables for ${serviceName}";
+
};
+
} // (optionalAttrs firewallOption {
+
openFirewall = mkOption {
+
type = types.bool;
+
default = false;
+
description = "Open firewall ports for ${serviceName}.";
+
};
+
}) // (optionalAttrs (extraOpts != null) extraOpts);
+
+
# Generator for HDFS service configs
+
hadoopServiceConfig =
+
{ name
+
, serviceOptions ? cfg.hdfs."${toLower name}"
+
, description ? "Hadoop HDFS ${name}"
+
, User ? "hdfs"
+
, allowedTCPPorts ? [ ]
+
, preStart ? ""
+
, environment ? { }
+
, extraConfig ? { }
+
}: (
+
+
mkIf serviceOptions.enable ( mkMerge [{
+
systemd.services."hdfs-${toLower name}" = {
+
inherit description preStart;
+
environment = environment // serviceOptions.extraEnv;
+
wantedBy = [ "multi-user.target" ];
+
inherit (serviceOptions) restartIfChanged;
+
serviceConfig = {
+
inherit User;
+
SyslogIdentifier = "hdfs-${toLower name}";
+
ExecStart = "${cfg.package}/bin/hdfs --config ${hadoopConf} ${toLower name} ${escapeShellArgs serviceOptions.extraFlags}";
+
Restart = "always";
+
};
+
};
+
+
services.hadoop.gatewayRole.enable = true;
+
+
networking.firewall.allowedTCPPorts = mkIf
+
((builtins.hasAttr "openFirewall" serviceOptions) && serviceOptions.openFirewall)
+
allowedTCPPorts;
+
} extraConfig])
+
);
+
in
{
options.services.hadoop.hdfs = {
-
namenode = {
-
enable = mkEnableOption "Whether to run the HDFS NameNode";
+
+
namenode = hadoopServiceOption { serviceName = "HDFS NameNode"; } // {
formatOnInit = mkOption {
type = types.bool;
default = false;
description = ''
-
Format HDFS namenode on first start. This is useful for quickly spinning up ephemeral HDFS clusters with a single namenode.
-
For HA clusters, initialization involves multiple steps across multiple nodes. Follow [this guide](https://hadoop.apache.org/docs/stable/hadoop-project-dist/hadoop-hdfs/HDFSHighAvailabilityWithQJM.html)
-
to initialize an HA cluster manually.
-
'';
-
};
-
inherit restartIfChanged;
-
openFirewall = mkOption {
-
type = types.bool;
-
default = true;
-
description = ''
-
Open firewall ports for namenode
+
Format HDFS namenode on first start. This is useful for quickly spinning up
+
ephemeral HDFS clusters with a single namenode.
+
For HA clusters, initialization involves multiple steps across multiple nodes.
+
Follow this guide to initialize an HA cluster manually:
+
<link xlink:href="https://hadoop.apache.org/docs/stable/hadoop-project-dist/hadoop-hdfs/HDFSHighAvailabilityWithQJM.html"/>
'';
};
};
-
datanode = {
-
enable = mkEnableOption "Whether to run the HDFS DataNode";
-
inherit restartIfChanged;
-
openFirewall = mkOption {
-
type = types.bool;
-
default = true;
-
description = ''
-
Open firewall ports for datanode
-
'';
+
+
datanode = hadoopServiceOption { serviceName = "HDFS DataNode"; } // {
+
dataDirs = mkOption {
+
default = null;
+
description = "Tier and path definitions for datanode storage.";
+
type = with types; nullOr (listOf (submodule {
+
options = {
+
type = mkOption {
+
type = enum [ "SSD" "DISK" "ARCHIVE" "RAM_DISK" ];
+
description = ''
+
Storage types ([SSD]/[DISK]/[ARCHIVE]/[RAM_DISK]) for HDFS storage policies.
+
'';
+
};
+
path = mkOption {
+
type = path;
+
example = [ "/var/lib/hadoop/hdfs/dn" ];
+
description = "Determines where on the local filesystem a data node should store its blocks.";
+
};
+
};
+
}));
};
};
-
journalnode = {
-
enable = mkEnableOption "Whether to run the HDFS JournalNode";
-
inherit restartIfChanged;
-
openFirewall = mkOption {
-
type = types.bool;
-
default = true;
-
description = ''
-
Open firewall ports for journalnode
-
'';
-
};
+
+
journalnode = hadoopServiceOption { serviceName = "HDFS JournalNode"; };
+
+
zkfc = hadoopServiceOption {
+
serviceName = "HDFS ZooKeeper failover controller";
+
firewallOption = false;
};
-
zkfc = {
-
enable = mkEnableOption "Whether to run the HDFS ZooKeeper failover controller";
-
inherit restartIfChanged;
-
};
-
httpfs = {
-
enable = mkEnableOption "Whether to run the HDFS HTTPfs server";
+
+
httpfs = hadoopServiceOption { serviceName = "HDFS JournalNode"; } // {
tempPath = mkOption {
type = types.path;
default = "/tmp/hadoop/httpfs";
-
description = ''
-
HTTPFS_TEMP path used by HTTPFS
-
'';
-
};
-
inherit restartIfChanged;
-
openFirewall = mkOption {
-
type = types.bool;
-
default = true;
-
description = ''
-
Open firewall ports for HTTPFS
-
'';
+
description = "HTTPFS_TEMP path used by HTTPFS";
};
};
+
};
config = mkMerge [
-
(mkIf cfg.hdfs.namenode.enable {
-
systemd.services.hdfs-namenode = {
-
description = "Hadoop HDFS NameNode";
-
wantedBy = [ "multi-user.target" ];
-
inherit (cfg.hdfs.namenode) restartIfChanged;
-
-
preStart = (mkIf cfg.hdfs.namenode.formatOnInit ''
-
${cfg.package}/bin/hdfs --config ${hadoopConf} namenode -format -nonInteractive || true
-
'');
-
-
serviceConfig = {
-
User = "hdfs";
-
SyslogIdentifier = "hdfs-namenode";
-
ExecStart = "${cfg.package}/bin/hdfs --config ${hadoopConf} namenode";
-
Restart = "always";
-
};
-
};
-
-
networking.firewall.allowedTCPPorts = (mkIf cfg.hdfs.namenode.openFirewall [
+
(hadoopServiceConfig {
+
name = "NameNode";
+
allowedTCPPorts = [
9870 # namenode.http-address
8020 # namenode.rpc-address
-
8022 # namenode. servicerpc-address
-
]);
+
8022 # namenode.servicerpc-address
+
8019 # dfs.ha.zkfc.port
+
];
+
preStart = (mkIf cfg.hdfs.namenode.formatOnInit
+
"${cfg.package}/bin/hdfs --config ${hadoopConf} namenode -format -nonInteractive || true"
+
);
})
-
(mkIf cfg.hdfs.datanode.enable {
-
systemd.services.hdfs-datanode = {
-
description = "Hadoop HDFS DataNode";
-
wantedBy = [ "multi-user.target" ];
-
inherit (cfg.hdfs.datanode) restartIfChanged;
-
serviceConfig = {
-
User = "hdfs";
-
SyslogIdentifier = "hdfs-datanode";
-
ExecStart = "${cfg.package}/bin/hdfs --config ${hadoopConf} datanode";
-
Restart = "always";
-
};
-
};
-
-
networking.firewall.allowedTCPPorts = (mkIf cfg.hdfs.datanode.openFirewall [
+
(hadoopServiceConfig {
+
name = "DataNode";
+
# port numbers for datanode changed between hadoop 2 and 3
+
allowedTCPPorts = if versionAtLeast cfg.package.version "3" then [
9864 # datanode.http.address
9866 # datanode.address
9867 # datanode.ipc.address
-
]);
+
] else [
+
50075 # datanode.http.address
+
50010 # datanode.address
+
50020 # datanode.ipc.address
+
];
+
extraConfig.services.hadoop.hdfsSiteInternal."dfs.datanode.data.dir" = let d = cfg.hdfs.datanode.dataDirs; in
+
if (d!= null) then (concatMapStringsSep "," (x: "["+x.type+"]file://"+x.path) cfg.hdfs.datanode.dataDirs) else d;
})
-
(mkIf cfg.hdfs.journalnode.enable {
-
systemd.services.hdfs-journalnode = {
-
description = "Hadoop HDFS JournalNode";
-
wantedBy = [ "multi-user.target" ];
-
inherit (cfg.hdfs.journalnode) restartIfChanged;
-
serviceConfig = {
-
User = "hdfs";
-
SyslogIdentifier = "hdfs-journalnode";
-
ExecStart = "${cfg.package}/bin/hdfs --config ${hadoopConf} journalnode";
-
Restart = "always";
-
};
-
};
-
-
networking.firewall.allowedTCPPorts = (mkIf cfg.hdfs.journalnode.openFirewall [
+
(hadoopServiceConfig {
+
name = "JournalNode";
+
allowedTCPPorts = [
8480 # dfs.journalnode.http-address
8485 # dfs.journalnode.rpc-address
-
]);
+
];
})
-
(mkIf cfg.hdfs.zkfc.enable {
-
systemd.services.hdfs-zkfc = {
-
description = "Hadoop HDFS ZooKeeper failover controller";
-
wantedBy = [ "multi-user.target" ];
-
inherit (cfg.hdfs.zkfc) restartIfChanged;
-
serviceConfig = {
-
User = "hdfs";
-
SyslogIdentifier = "hdfs-zkfc";
-
ExecStart = "${cfg.package}/bin/hdfs --config ${hadoopConf} zkfc";
-
Restart = "always";
-
};
-
};
+
(hadoopServiceConfig {
+
name = "zkfc";
+
description = "Hadoop HDFS ZooKeeper failover controller";
})
-
(mkIf cfg.hdfs.httpfs.enable {
-
systemd.services.hdfs-httpfs = {
-
description = "Hadoop httpfs";
-
wantedBy = [ "multi-user.target" ];
-
inherit (cfg.hdfs.httpfs) restartIfChanged;
-
environment.HTTPFS_TEMP = cfg.hdfs.httpfs.tempPath;
-
-
preStart = ''
-
mkdir -p $HTTPFS_TEMP
-
'';
-
-
serviceConfig = {
-
User = "httpfs";
-
SyslogIdentifier = "hdfs-httpfs";
-
ExecStart = "${cfg.package}/bin/hdfs --config ${hadoopConf} httpfs";
-
Restart = "always";
-
};
-
};
-
networking.firewall.allowedTCPPorts = (mkIf cfg.hdfs.httpfs.openFirewall [
+
(hadoopServiceConfig {
+
name = "HTTPFS";
+
environment.HTTPFS_TEMP = cfg.hdfs.httpfs.tempPath;
+
preStart = "mkdir -p $HTTPFS_TEMP";
+
User = "httpfs";
+
allowedTCPPorts = [
14000 # httpfs.http.port
-
]);
+
];
})
-
(mkIf (
-
cfg.hdfs.namenode.enable || cfg.hdfs.datanode.enable || cfg.hdfs.journalnode.enable || cfg.hdfs.zkfc.enable
-
) {
+
+
(mkIf cfg.gatewayRole.enable {
users.users.hdfs = {
description = "Hadoop HDFS user";
group = "hadoop";
···
isSystemUser = true;
};
})
+
];
}
+85 -13
nixos/modules/services/cluster/hadoop/yarn.nix
···
'';
default = false;
};
+
extraFlags = mkOption{
+
type = with types; listOf str;
+
default = [];
+
description = "Extra command line flags to pass to the service";
+
example = [
+
"-Dcom.sun.management.jmxremote"
+
"-Dcom.sun.management.jmxremote.port=8010"
+
];
+
};
+
extraEnv = mkOption{
+
type = with types; attrsOf str;
+
default = {};
+
description = "Extra environment variables";
+
};
in
{
options.services.hadoop.yarn = {
resourcemanager = {
-
enable = mkEnableOption "Whether to run the Hadoop YARN ResourceManager";
-
inherit restartIfChanged;
+
enable = mkEnableOption "Hadoop YARN ResourceManager";
+
inherit restartIfChanged extraFlags extraEnv;
+
openFirewall = mkOption {
type = types.bool;
-
default = true;
+
default = false;
description = ''
Open firewall ports for resourcemanager
'';
};
};
nodemanager = {
-
enable = mkEnableOption "Whether to run the Hadoop YARN NodeManager";
-
inherit restartIfChanged;
+
enable = mkEnableOption "Hadoop YARN NodeManager";
+
inherit restartIfChanged extraFlags extraEnv;
+
+
resource = {
+
cpuVCores = mkOption {
+
description = "Number of vcores that can be allocated for containers.";
+
type = with types; nullOr ints.positive;
+
default = null;
+
};
+
maximumAllocationVCores = mkOption {
+
description = "The maximum virtual CPU cores any container can be allocated.";
+
type = with types; nullOr ints.positive;
+
default = null;
+
};
+
memoryMB = mkOption {
+
description = "Amount of physical memory, in MB, that can be allocated for containers.";
+
type = with types; nullOr ints.positive;
+
default = null;
+
};
+
maximumAllocationMB = mkOption {
+
description = "The maximum physical memory any container can be allocated.";
+
type = with types; nullOr ints.positive;
+
default = null;
+
};
+
};
+
+
useCGroups = mkOption {
+
type = types.bool;
+
default = true;
+
description = ''
+
Use cgroups to enforce resource limits on containers
+
'';
+
};
+
+
localDir = mkOption {
+
description = "List of directories to store localized files in.";
+
type = with types; nullOr (listOf path);
+
example = [ "/var/lib/hadoop/yarn/nm" ];
+
default = null;
+
};
+
addBinBash = mkOption {
type = types.bool;
default = true;
···
};
openFirewall = mkOption {
type = types.bool;
-
default = true;
+
default = false;
description = ''
Open firewall ports for nodemanager.
Because containers can listen on any ephemeral port, TCP ports 1024–65535 will be opened.
···
};
config = mkMerge [
-
(mkIf (
-
cfg.yarn.resourcemanager.enable || cfg.yarn.nodemanager.enable
-
) {
-
+
(mkIf cfg.gatewayRole.enable {
users.users.yarn = {
description = "Hadoop YARN user";
group = "hadoop";
···
description = "Hadoop YARN ResourceManager";
wantedBy = [ "multi-user.target" ];
inherit (cfg.yarn.resourcemanager) restartIfChanged;
+
environment = cfg.yarn.resourcemanager.extraEnv;
serviceConfig = {
User = "yarn";
SyslogIdentifier = "yarn-resourcemanager";
ExecStart = "${cfg.package}/bin/yarn --config ${hadoopConf} " +
-
" resourcemanager";
+
" resourcemanager ${escapeShellArgs cfg.yarn.resourcemanager.extraFlags}";
Restart = "always";
};
};
+
+
services.hadoop.gatewayRole.enable = true;
+
networking.firewall.allowedTCPPorts = (mkIf cfg.yarn.resourcemanager.openFirewall [
8088 # resourcemanager.webapp.address
8030 # resourcemanager.scheduler.address
···
description = "Hadoop YARN NodeManager";
wantedBy = [ "multi-user.target" ];
inherit (cfg.yarn.nodemanager) restartIfChanged;
+
environment = cfg.yarn.nodemanager.extraEnv;
preStart = ''
# create log dir
···
chown yarn:hadoop /var/log/hadoop/yarn/nodemanager
# set up setuid container executor binary
+
umount /run/wrappers/yarn-nodemanager/cgroup/cpu || true
rm -rf /run/wrappers/yarn-nodemanager/ || true
-
mkdir -p /run/wrappers/yarn-nodemanager/{bin,etc/hadoop}
+
mkdir -p /run/wrappers/yarn-nodemanager/{bin,etc/hadoop,cgroup/cpu}
cp ${cfg.package}/lib/${cfg.package.untarDir}/bin/container-executor /run/wrappers/yarn-nodemanager/bin/
chgrp hadoop /run/wrappers/yarn-nodemanager/bin/container-executor
chmod 6050 /run/wrappers/yarn-nodemanager/bin/container-executor
···
SyslogIdentifier = "yarn-nodemanager";
PermissionsStartOnly = true;
ExecStart = "${cfg.package}/bin/yarn --config ${hadoopConf} " +
-
" nodemanager";
+
" nodemanager ${escapeShellArgs cfg.yarn.nodemanager.extraFlags}";
Restart = "always";
};
+
};
+
+
services.hadoop.gatewayRole.enable = true;
+
+
services.hadoop.yarnSiteInternal = with cfg.yarn.nodemanager; {
+
"yarn.nodemanager.local-dirs" = localDir;
+
"yarn.scheduler.maximum-allocation-vcores" = resource.maximumAllocationVCores;
+
"yarn.scheduler.maximum-allocation-mb" = resource.maximumAllocationMB;
+
"yarn.nodemanager.resource.cpu-vcores" = resource.cpuVCores;
+
"yarn.nodemanager.resource.memory-mb" = resource.memoryMB;
+
} // mkIf useCGroups {
+
"yarn.nodemanager.linux-container-executor.cgroups.hierarchy" = "/hadoop-yarn";
+
"yarn.nodemanager.linux-container-executor.resources-handler.class" = "org.apache.hadoop.yarn.server.nodemanager.util.CgroupsLCEResourcesHandler";
+
"yarn.nodemanager.linux-container-executor.cgroups.mount" = "true";
+
"yarn.nodemanager.linux-container-executor.cgroups.mount-path" = "/run/wrappers/yarn-nodemanager/cgroup";
};
networking.firewall.allowedTCPPortRanges = [
+8 -12
nixos/modules/services/networking/nsd.nix
···
zone.children
);
-
# fighting infinite recursion
-
zoneOptions = zoneOptionsRaw // childConfig zoneOptions1 true;
-
zoneOptions1 = zoneOptionsRaw // childConfig zoneOptions2 false;
-
zoneOptions2 = zoneOptionsRaw // childConfig zoneOptions3 false;
-
zoneOptions3 = zoneOptionsRaw // childConfig zoneOptions4 false;
-
zoneOptions4 = zoneOptionsRaw // childConfig zoneOptions5 false;
-
zoneOptions5 = zoneOptionsRaw // childConfig zoneOptions6 false;
-
zoneOptions6 = zoneOptionsRaw // childConfig null false;
-
-
childConfig = x: v: { options.children = { type = types.attrsOf x; visible = v; }; };
-
# options are ordered alphanumerically
-
zoneOptionsRaw = types.submodule {
+
zoneOptions = types.submodule {
options = {
allowAXFRFallback = mkOption {
···
};
children = mkOption {
+
# TODO: This relies on the fact that `types.anything` doesn't set any
+
# values of its own to any defaults, because in the above zoneConfigs',
+
# values from children override ones from parents, but only if the
+
# attributes are defined. Because of this, we can't replace the element
+
# type here with `zoneConfigs`, since that would set all the attributes
+
# to default values, breaking the parent inheriting function.
+
type = types.attrsOf types.anything;
default = {};
description = ''
Children zones inherit all options of their parents. Attributes
+1
nixos/modules/services/networking/unbound.nix
···
};
stateDir = mkOption {
+
type = types.path;
default = "/var/lib/unbound";
description = "Directory holding all state for unbound to run.";
};
+1
nixos/modules/services/networking/vsftpd.nix
···
userlist = mkOption {
default = [];
+
type = types.listOf types.str;
description = "See <option>userlistFile</option>.";
};
+1 -10
nixos/modules/services/web-apps/nextcloud.nix
···
package = mkOption {
type = types.package;
description = "Which package to use for the Nextcloud instance.";
-
relatedPackages = [ "nextcloud21" "nextcloud22" "nextcloud23" ];
+
relatedPackages = [ "nextcloud22" "nextcloud23" ];
};
phpPackage = mkOption {
type = types.package;
···
nextcloud defined in an overlay, please set `services.nextcloud.package` to
`pkgs.nextcloud`.
''
-
# 21.03 will not be an official release - it was instead 21.05.
-
# This versionOlder statement remains set to 21.03 for backwards compatibility.
-
# See https://github.com/NixOS/nixpkgs/pull/108899 and
-
# https://github.com/NixOS/rfcs/blob/master/rfcs/0080-nixos-release-schedule.md.
-
# FIXME(@Ma27) remove this else-if as soon as 21.05 is EOL! This is only here
-
# to ensure that users who are on Nextcloud 19 with a stateVersion <21.05 with
-
# no explicit services.nextcloud.package don't upgrade to v21 by accident (
-
# nextcloud20 throws an eval-error because it's dropped).
-
else if versionOlder stateVersion "21.03" then nextcloud20
else if versionOlder stateVersion "21.11" then nextcloud21
else if versionOlder stateVersion "22.05" then nextcloud22
else nextcloud23
+18
nixos/modules/services/x11/display-managers/default.nix
···
session = mkOption {
default = [];
+
type = with types; listOf (submodule ({ ... }: {
+
options = {
+
manage = mkOption {
+
description = "Whether this is a desktop or a window manager";
+
type = enum [ "desktop" "window" ];
+
};
+
+
name = mkOption {
+
description = "Name of this session";
+
type = str;
+
};
+
+
start = mkOption {
+
description = "Commands to run to start this session";
+
type = lines;
+
};
+
};
+
}));
example = literalExpression
''
[ { manage = "desktop";
+1 -1
nixos/modules/system/boot/kernel.nix
···
boot.kernelPackages = mkOption {
default = pkgs.linuxPackages;
-
type = types.unspecified // { merge = mergeEqualOption; };
+
type = types.raw;
apply = kernelPackages: kernelPackages.extend (self: super: {
kernel = super.kernel.override (originalArgs: {
inherit randstructSeed;
+1 -1
nixos/modules/system/boot/stage-1.nix
···
else "gzip"
);
defaultText = literalDocBook "<literal>zstd</literal> if the kernel supports it (5.9+), <literal>gzip</literal> if not";
-
type = types.unspecified; # We don't have a function type...
+
type = types.either types.str (types.functionTo types.str);
description = ''
The compressor to use on the initrd image. May be any of:
+16 -201
nixos/modules/system/boot/systemd.nix
···
systemd = cfg.package;
+
inherit (systemdUtils.lib)
+
makeJobScript
+
unitConfig
+
serviceConfig
+
mountConfig
+
automountConfig
+
commonUnitText
+
targetToUnit
+
serviceToUnit
+
socketToUnit
+
timerToUnit
+
pathToUnit
+
mountToUnit
+
automountToUnit
+
sliceToUnit;
+
upstreamSystemUnits =
[ # Targets.
"basic.target"
···
"xdg-desktop-autostart.target"
];
-
makeJobScript = name: text:
-
let
-
scriptName = replaceChars [ "\\" "@" ] [ "-" "_" ] (shellEscape name);
-
out = (pkgs.writeShellScriptBin scriptName ''
-
set -e
-
${text}
-
'').overrideAttrs (_: {
-
# The derivation name is different from the script file name
-
# to keep the script file name short to avoid cluttering logs.
-
name = "unit-script-${scriptName}";
-
});
-
in "${out}/bin/${scriptName}";
-
-
unitConfig = { config, options, ... }: {
-
config = {
-
unitConfig =
-
optionalAttrs (config.requires != [])
-
{ Requires = toString config.requires; }
-
// optionalAttrs (config.wants != [])
-
{ Wants = toString config.wants; }
-
// optionalAttrs (config.after != [])
-
{ After = toString config.after; }
-
// optionalAttrs (config.before != [])
-
{ Before = toString config.before; }
-
// optionalAttrs (config.bindsTo != [])
-
{ BindsTo = toString config.bindsTo; }
-
// optionalAttrs (config.partOf != [])
-
{ PartOf = toString config.partOf; }
-
// optionalAttrs (config.conflicts != [])
-
{ Conflicts = toString config.conflicts; }
-
// optionalAttrs (config.requisite != [])
-
{ Requisite = toString config.requisite; }
-
// optionalAttrs (config.restartTriggers != [])
-
{ X-Restart-Triggers = toString config.restartTriggers; }
-
// optionalAttrs (config.reloadTriggers != [])
-
{ X-Reload-Triggers = toString config.reloadTriggers; }
-
// optionalAttrs (config.description != "") {
-
Description = config.description; }
-
// optionalAttrs (config.documentation != []) {
-
Documentation = toString config.documentation; }
-
// optionalAttrs (config.onFailure != []) {
-
OnFailure = toString config.onFailure; }
-
// optionalAttrs (options.startLimitIntervalSec.isDefined) {
-
StartLimitIntervalSec = toString config.startLimitIntervalSec;
-
} // optionalAttrs (options.startLimitBurst.isDefined) {
-
StartLimitBurst = toString config.startLimitBurst;
-
};
-
};
-
};
-
-
serviceConfig = { name, config, ... }: {
-
config = mkMerge
-
[ { # Default path for systemd services. Should be quite minimal.
-
path = mkAfter
-
[ pkgs.coreutils
-
pkgs.findutils
-
pkgs.gnugrep
-
pkgs.gnused
-
systemd
-
];
-
environment.PATH = "${makeBinPath config.path}:${makeSearchPathOutput "bin" "sbin" config.path}";
-
}
-
(mkIf (config.preStart != "")
-
{ serviceConfig.ExecStartPre =
-
[ (makeJobScript "${name}-pre-start" config.preStart) ];
-
})
-
(mkIf (config.script != "")
-
{ serviceConfig.ExecStart =
-
makeJobScript "${name}-start" config.script + " " + config.scriptArgs;
-
})
-
(mkIf (config.postStart != "")
-
{ serviceConfig.ExecStartPost =
-
[ (makeJobScript "${name}-post-start" config.postStart) ];
-
})
-
(mkIf (config.reload != "")
-
{ serviceConfig.ExecReload =
-
makeJobScript "${name}-reload" config.reload;
-
})
-
(mkIf (config.preStop != "")
-
{ serviceConfig.ExecStop =
-
makeJobScript "${name}-pre-stop" config.preStop;
-
})
-
(mkIf (config.postStop != "")
-
{ serviceConfig.ExecStopPost =
-
makeJobScript "${name}-post-stop" config.postStop;
-
})
-
];
-
};
-
-
mountConfig = { config, ... }: {
-
config = {
-
mountConfig =
-
{ What = config.what;
-
Where = config.where;
-
} // optionalAttrs (config.type != "") {
-
Type = config.type;
-
} // optionalAttrs (config.options != "") {
-
Options = config.options;
-
};
-
};
-
};
-
-
automountConfig = { config, ... }: {
-
config = {
-
automountConfig =
-
{ Where = config.where;
-
};
-
};
-
};
-
-
commonUnitText = def: ''
-
[Unit]
-
${attrsToSection def.unitConfig}
-
'';
-
-
targetToUnit = name: def:
-
{ inherit (def) aliases wantedBy requiredBy enable;
-
text =
-
''
-
[Unit]
-
${attrsToSection def.unitConfig}
-
'';
-
};
-
-
serviceToUnit = name: def:
-
{ inherit (def) aliases wantedBy requiredBy enable;
-
text = commonUnitText def +
-
''
-
[Service]
-
${let env = cfg.globalEnvironment // def.environment;
-
in concatMapStrings (n:
-
let s = optionalString (env.${n} != null)
-
"Environment=${builtins.toJSON "${n}=${env.${n}}"}\n";
-
# systemd max line length is now 1MiB
-
# https://github.com/systemd/systemd/commit/e6dde451a51dc5aaa7f4d98d39b8fe735f73d2af
-
in if stringLength s >= 1048576 then throw "The value of the environment variable ‘${n}’ in systemd service ‘${name}.service’ is too long." else s) (attrNames env)}
-
${if def.reloadIfChanged then ''
-
X-ReloadIfChanged=true
-
'' else if !def.restartIfChanged then ''
-
X-RestartIfChanged=false
-
'' else ""}
-
${optionalString (!def.stopIfChanged) "X-StopIfChanged=false"}
-
${attrsToSection def.serviceConfig}
-
'';
-
};
-
-
socketToUnit = name: def:
-
{ inherit (def) aliases wantedBy requiredBy enable;
-
text = commonUnitText def +
-
''
-
[Socket]
-
${attrsToSection def.socketConfig}
-
${concatStringsSep "\n" (map (s: "ListenStream=${s}") def.listenStreams)}
-
${concatStringsSep "\n" (map (s: "ListenDatagram=${s}") def.listenDatagrams)}
-
'';
-
};
-
-
timerToUnit = name: def:
-
{ inherit (def) aliases wantedBy requiredBy enable;
-
text = commonUnitText def +
-
''
-
[Timer]
-
${attrsToSection def.timerConfig}
-
'';
-
};
-
-
pathToUnit = name: def:
-
{ inherit (def) aliases wantedBy requiredBy enable;
-
text = commonUnitText def +
-
''
-
[Path]
-
${attrsToSection def.pathConfig}
-
'';
-
};
-
-
mountToUnit = name: def:
-
{ inherit (def) aliases wantedBy requiredBy enable;
-
text = commonUnitText def +
-
''
-
[Mount]
-
${attrsToSection def.mountConfig}
-
'';
-
};
-
-
automountToUnit = name: def:
-
{ inherit (def) aliases wantedBy requiredBy enable;
-
text = commonUnitText def +
-
''
-
[Automount]
-
${attrsToSection def.automountConfig}
-
'';
-
};
-
-
sliceToUnit = name: def:
-
{ inherit (def) aliases wantedBy requiredBy enable;
-
text = commonUnitText def +
-
''
-
[Slice]
-
${attrsToSection def.sliceConfig}
-
'';
-
};
logindHandlerType = types.enum [
"ignore" "poweroff" "reboot" "halt" "kexec" "suspend"
+4 -4
nixos/tests/all-tests.nix
···
grocy = handleTest ./grocy.nix {};
grub = handleTest ./grub.nix {};
gvisor = handleTest ./gvisor.nix {};
-
hadoop.all = handleTestOn [ "x86_64-linux" "aarch64-linux" ] ./hadoop/hadoop.nix {};
-
hadoop.hdfs = handleTestOn [ "x86_64-linux" "aarch64-linux" ] ./hadoop/hdfs.nix {};
-
hadoop.yarn = handleTestOn [ "x86_64-linux" "aarch64-linux" ] ./hadoop/yarn.nix {};
+
hadoop = import ./hadoop { inherit handleTestOn; package=pkgs.hadoop; };
+
hadoop_3_2 = import ./hadoop { inherit handleTestOn; package=pkgs.hadoop_3_2; };
+
hadoop2 = import ./hadoop { inherit handleTestOn; package=pkgs.hadoop2; };
haka = handleTest ./haka.nix {};
haproxy = handleTest ./haproxy.nix {};
hardened = handleTest ./hardened.nix {};
···
sonarr = handleTest ./sonarr.nix {};
sourcehut = handleTest ./sourcehut.nix {};
spacecookie = handleTest ./spacecookie.nix {};
-
spark = handleTestOn ["x86_64-linux"] ./spark {};
+
spark = handleTestOn [ "x86_64-linux" "aarch64-linux" ] ./spark {};
sslh = handleTest ./sslh.nix {};
sssd = handleTestOn ["x86_64-linux"] ./sssd.nix {};
sssd-ldap = handleTestOn ["x86_64-linux"] ./sssd-ldap.nix {};
+7
nixos/tests/hadoop/default.nix
···
+
{ handleTestOn, package, ... }:
+
+
{
+
all = handleTestOn [ "x86_64-linux" "aarch64-linux" ] ./hadoop.nix { inherit package; };
+
hdfs = handleTestOn [ "x86_64-linux" "aarch64-linux" ] ./hdfs.nix { inherit package; };
+
yarn = handleTestOn [ "x86_64-linux" "aarch64-linux" ] ./yarn.nix { inherit package; };
+
}
+146 -119
nixos/tests/hadoop/hadoop.nix
···
# This test is very comprehensive. It tests whether all hadoop services work well with each other.
# Run this when updating the Hadoop package or making significant changes to the hadoop module.
# For a more basic test, see hdfs.nix and yarn.nix
-
import ../make-test-python.nix ({pkgs, ...}: {
+
import ../make-test-python.nix ({ package, ... }: {
+
name = "hadoop-combined";
-
nodes = let
-
package = pkgs.hadoop;
-
coreSite = {
-
"fs.defaultFS" = "hdfs://ns1";
-
};
-
hdfsSite = {
-
"dfs.namenode.rpc-bind-host" = "0.0.0.0";
-
"dfs.namenode.http-bind-host" = "0.0.0.0";
-
"dfs.namenode.servicerpc-bind-host" = "0.0.0.0";
+
nodes =
+
let
+
coreSite = {
+
"fs.defaultFS" = "hdfs://ns1";
+
};
+
hdfsSite = {
+
# HA Quorum Journal Manager configuration
+
"dfs.nameservices" = "ns1";
+
"dfs.ha.namenodes.ns1" = "nn1,nn2";
+
"dfs.namenode.shared.edits.dir.ns1" = "qjournal://jn1:8485;jn2:8485;jn3:8485/ns1";
+
"dfs.namenode.rpc-address.ns1.nn1" = "nn1:8020";
+
"dfs.namenode.rpc-address.ns1.nn2" = "nn2:8020";
+
"dfs.namenode.servicerpc-address.ns1.nn1" = "nn1:8022";
+
"dfs.namenode.servicerpc-address.ns1.nn2" = "nn2:8022";
+
"dfs.namenode.http-address.ns1.nn1" = "nn1:9870";
+
"dfs.namenode.http-address.ns1.nn2" = "nn2:9870";
-
# HA Quorum Journal Manager configuration
-
"dfs.nameservices" = "ns1";
-
"dfs.ha.namenodes.ns1" = "nn1,nn2";
-
"dfs.namenode.shared.edits.dir.ns1.nn1" = "qjournal://jn1:8485;jn2:8485;jn3:8485/ns1";
-
"dfs.namenode.shared.edits.dir.ns1.nn2" = "qjournal://jn1:8485;jn2:8485;jn3:8485/ns1";
-
"dfs.namenode.rpc-address.ns1.nn1" = "nn1:8020";
-
"dfs.namenode.rpc-address.ns1.nn2" = "nn2:8020";
-
"dfs.namenode.servicerpc-address.ns1.nn1" = "nn1:8022";
-
"dfs.namenode.servicerpc-address.ns1.nn2" = "nn2:8022";
-
"dfs.namenode.http-address.ns1.nn1" = "nn1:9870";
-
"dfs.namenode.http-address.ns1.nn2" = "nn2:9870";
+
# Automatic failover configuration
+
"dfs.client.failover.proxy.provider.ns1" = "org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider";
+
"dfs.ha.automatic-failover.enabled.ns1" = "true";
+
"dfs.ha.fencing.methods" = "shell(true)";
+
"ha.zookeeper.quorum" = "zk1:2181";
+
};
+
yarnSite = {
+
"yarn.resourcemanager.zk-address" = "zk1:2181";
+
"yarn.resourcemanager.ha.enabled" = "true";
+
"yarn.resourcemanager.ha.rm-ids" = "rm1,rm2";
+
"yarn.resourcemanager.hostname.rm1" = "rm1";
+
"yarn.resourcemanager.hostname.rm2" = "rm2";
+
"yarn.resourcemanager.ha.automatic-failover.enabled" = "true";
+
"yarn.resourcemanager.cluster-id" = "cluster1";
+
# yarn.resourcemanager.webapp.address needs to be defined even though yarn.resourcemanager.hostname is set. This shouldn't be necessary, but there's a bug in
+
# hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/amfilter/AmFilterInitializer.java:70
+
# that causes AM containers to fail otherwise.
+
"yarn.resourcemanager.webapp.address.rm1" = "rm1:8088";
+
"yarn.resourcemanager.webapp.address.rm2" = "rm2:8088";
+
};
+
in
+
{
+
zk1 = { ... }: {
+
services.zookeeper.enable = true;
+
networking.firewall.allowedTCPPorts = [ 2181 ];
+
};
-
# Automatic failover configuration
-
"dfs.client.failover.proxy.provider.ns1" = "org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider";
-
"dfs.ha.automatic-failover.enabled.ns1" = "true";
-
"dfs.ha.fencing.methods" = "shell(true)";
-
"ha.zookeeper.quorum" = "zk1:2181";
-
};
-
yarnSiteHA = {
-
"yarn.resourcemanager.zk-address" = "zk1:2181";
-
"yarn.resourcemanager.ha.enabled" = "true";
-
"yarn.resourcemanager.ha.rm-ids" = "rm1,rm2";
-
"yarn.resourcemanager.hostname.rm1" = "rm1";
-
"yarn.resourcemanager.hostname.rm2" = "rm2";
-
"yarn.resourcemanager.ha.automatic-failover.enabled" = "true";
-
"yarn.resourcemanager.cluster-id" = "cluster1";
-
# yarn.resourcemanager.webapp.address needs to be defined even though yarn.resourcemanager.hostname is set. This shouldn't be necessary, but there's a bug in
-
# hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/amfilter/AmFilterInitializer.java:70
-
# that causes AM containers to fail otherwise.
-
"yarn.resourcemanager.webapp.address.rm1" = "rm1:8088";
-
"yarn.resourcemanager.webapp.address.rm2" = "rm2:8088";
-
};
-
in {
-
zk1 = { ... }: {
-
services.zookeeper.enable = true;
-
networking.firewall.allowedTCPPorts = [ 2181 ];
-
};
-
-
# HDFS cluster
-
nn1 = {pkgs, options, ...}: {
-
services.hadoop = {
-
inherit package coreSite hdfsSite;
-
hdfs.namenode.enable = true;
-
hdfs.zkfc.enable = true;
+
# HDFS cluster
+
nn1 = { ... }: {
+
services.hadoop = {
+
inherit package coreSite hdfsSite;
+
hdfs.namenode = {
+
enable = true;
+
openFirewall = true;
+
};
+
hdfs.zkfc.enable = true;
+
};
};
-
};
-
nn2 = {pkgs, options, ...}: {
-
services.hadoop = {
-
inherit package coreSite hdfsSite;
-
hdfs.namenode.enable = true;
-
hdfs.zkfc.enable = true;
+
nn2 = { ... }: {
+
services.hadoop = {
+
inherit package coreSite hdfsSite;
+
hdfs.namenode = {
+
enable = true;
+
openFirewall = true;
+
};
+
hdfs.zkfc.enable = true;
+
};
};
-
};
-
jn1 = {pkgs, options, ...}: {
-
services.hadoop = {
-
inherit package coreSite hdfsSite;
-
hdfs.journalnode.enable = true;
+
jn1 = { ... }: {
+
services.hadoop = {
+
inherit package coreSite hdfsSite;
+
hdfs.journalnode = {
+
enable = true;
+
openFirewall = true;
+
};
+
};
};
-
};
-
jn2 = {pkgs, options, ...}: {
-
services.hadoop = {
-
inherit package coreSite hdfsSite;
-
hdfs.journalnode.enable = true;
+
jn2 = { ... }: {
+
services.hadoop = {
+
inherit package coreSite hdfsSite;
+
hdfs.journalnode = {
+
enable = true;
+
openFirewall = true;
+
};
+
};
};
-
};
-
jn3 = {pkgs, options, ...}: {
-
services.hadoop = {
-
inherit package coreSite hdfsSite;
-
hdfs.journalnode.enable = true;
+
jn3 = { ... }: {
+
services.hadoop = {
+
inherit package coreSite hdfsSite;
+
hdfs.journalnode = {
+
enable = true;
+
openFirewall = true;
+
};
+
};
};
-
};
-
dn1 = {pkgs, options, ...}: {
-
services.hadoop = {
-
inherit package coreSite hdfsSite;
-
hdfs.datanode.enable = true;
+
dn1 = { ... }: {
+
services.hadoop = {
+
inherit package coreSite hdfsSite;
+
hdfs.datanode = {
+
enable = true;
+
openFirewall = true;
+
};
+
};
};
-
};
-
# YARN cluster
-
rm1 = {pkgs, options, ...}: {
-
services.hadoop = {
-
inherit package coreSite hdfsSite;
-
yarnSite = options.services.hadoop.yarnSite.default // yarnSiteHA;
-
yarn.resourcemanager.enable = true;
+
# YARN cluster
+
rm1 = { options, ... }: {
+
services.hadoop = {
+
inherit package coreSite hdfsSite yarnSite;
+
yarn.resourcemanager = {
+
enable = true;
+
openFirewall = true;
+
};
+
};
};
-
};
-
rm2 = {pkgs, options, ...}: {
-
services.hadoop = {
-
inherit package coreSite hdfsSite;
-
yarnSite = options.services.hadoop.yarnSite.default // yarnSiteHA;
-
yarn.resourcemanager.enable = true;
+
rm2 = { options, ... }: {
+
services.hadoop = {
+
inherit package coreSite hdfsSite yarnSite;
+
yarn.resourcemanager = {
+
enable = true;
+
openFirewall = true;
+
};
+
};
};
-
};
-
nm1 = {pkgs, options, ...}: {
-
virtualisation.memorySize = 2048;
-
services.hadoop = {
-
inherit package coreSite hdfsSite;
-
yarnSite = options.services.hadoop.yarnSite.default // yarnSiteHA;
-
yarn.nodemanager.enable = true;
+
nm1 = { options, ... }: {
+
virtualisation.memorySize = 2048;
+
services.hadoop = {
+
inherit package coreSite hdfsSite yarnSite;
+
yarn.nodemanager = {
+
enable = true;
+
openFirewall = true;
+
};
+
};
};
-
};
+
client = { options, ... }: {
+
services.hadoop = {
+
gatewayRole.enable = true;
+
inherit package coreSite hdfsSite yarnSite;
+
};
+
};
};
testScript = ''
···
# DN should have started by now, but confirm anyway
dn1.wait_for_unit("hdfs-datanode")
# Print states of namenodes
-
dn1.succeed("sudo -u hdfs hdfs haadmin -getAllServiceState | systemd-cat")
+
client.succeed("sudo -u hdfs hdfs haadmin -getAllServiceState | systemd-cat")
# Wait for cluster to exit safemode
-
dn1.succeed("sudo -u hdfs hdfs dfsadmin -safemode wait")
-
dn1.succeed("sudo -u hdfs hdfs haadmin -getAllServiceState | systemd-cat")
+
client.succeed("sudo -u hdfs hdfs dfsadmin -safemode wait")
+
client.succeed("sudo -u hdfs hdfs haadmin -getAllServiceState | systemd-cat")
# test R/W
-
dn1.succeed("echo testfilecontents | sudo -u hdfs hdfs dfs -put - /testfile")
-
assert "testfilecontents" in dn1.succeed("sudo -u hdfs hdfs dfs -cat /testfile")
+
client.succeed("echo testfilecontents | sudo -u hdfs hdfs dfs -put - /testfile")
+
assert "testfilecontents" in client.succeed("sudo -u hdfs hdfs dfs -cat /testfile")
# Test NN failover
nn1.succeed("systemctl stop hdfs-namenode")
-
assert "active" in dn1.succeed("sudo -u hdfs hdfs haadmin -getAllServiceState")
-
dn1.succeed("sudo -u hdfs hdfs haadmin -getAllServiceState | systemd-cat")
-
assert "testfilecontents" in dn1.succeed("sudo -u hdfs hdfs dfs -cat /testfile")
+
assert "active" in client.succeed("sudo -u hdfs hdfs haadmin -getAllServiceState")
+
client.succeed("sudo -u hdfs hdfs haadmin -getAllServiceState | systemd-cat")
+
assert "testfilecontents" in client.succeed("sudo -u hdfs hdfs dfs -cat /testfile")
nn1.succeed("systemctl start hdfs-namenode")
nn1.wait_for_open_port(9870)
nn1.wait_for_open_port(8022)
nn1.wait_for_open_port(8020)
-
assert "standby" in dn1.succeed("sudo -u hdfs hdfs haadmin -getAllServiceState")
-
dn1.succeed("sudo -u hdfs hdfs haadmin -getAllServiceState | systemd-cat")
+
assert "standby" in client.succeed("sudo -u hdfs hdfs haadmin -getAllServiceState")
+
client.succeed("sudo -u hdfs hdfs haadmin -getAllServiceState | systemd-cat")
#### YARN tests ####
···
nm1.wait_for_unit("yarn-nodemanager")
nm1.wait_for_open_port(8042)
nm1.wait_for_open_port(8040)
-
nm1.wait_until_succeeds("yarn node -list | grep Nodes:1")
-
nm1.succeed("sudo -u yarn yarn rmadmin -getAllServiceState | systemd-cat")
-
nm1.succeed("sudo -u yarn yarn node -list | systemd-cat")
+
client.wait_until_succeeds("yarn node -list | grep Nodes:1")
+
client.succeed("sudo -u yarn yarn rmadmin -getAllServiceState | systemd-cat")
+
client.succeed("sudo -u yarn yarn node -list | systemd-cat")
# Test RM failover
rm1.succeed("systemctl stop yarn-resourcemanager")
-
assert "standby" not in nm1.succeed("sudo -u yarn yarn rmadmin -getAllServiceState")
-
nm1.succeed("sudo -u yarn yarn rmadmin -getAllServiceState | systemd-cat")
+
assert "standby" not in client.succeed("sudo -u yarn yarn rmadmin -getAllServiceState")
+
client.succeed("sudo -u yarn yarn rmadmin -getAllServiceState | systemd-cat")
rm1.succeed("systemctl start yarn-resourcemanager")
rm1.wait_for_unit("yarn-resourcemanager")
rm1.wait_for_open_port(8088)
-
assert "standby" in nm1.succeed("sudo -u yarn yarn rmadmin -getAllServiceState")
-
nm1.succeed("sudo -u yarn yarn rmadmin -getAllServiceState | systemd-cat")
+
assert "standby" in client.succeed("sudo -u yarn yarn rmadmin -getAllServiceState")
+
client.succeed("sudo -u yarn yarn rmadmin -getAllServiceState | systemd-cat")
-
assert "Estimated value of Pi is" in nm1.succeed("HADOOP_USER_NAME=hdfs yarn jar $(readlink $(which yarn) | sed -r 's~bin/yarn~lib/hadoop-*/share/hadoop/mapreduce/hadoop-mapreduce-examples-*.jar~g') pi 2 10")
-
assert "SUCCEEDED" in nm1.succeed("yarn application -list -appStates FINISHED")
+
assert "Estimated value of Pi is" in client.succeed("HADOOP_USER_NAME=hdfs yarn jar $(readlink $(which yarn) | sed -r 's~bin/yarn~lib/hadoop-*/share/hadoop/mapreduce/hadoop-mapreduce-examples-*.jar~g') pi 2 10")
+
assert "SUCCEEDED" in client.succeed("yarn application -list -appStates FINISHED")
'';
})
+43 -18
nixos/tests/hadoop/hdfs.nix
···
# Test a minimal HDFS cluster with no HA
-
import ../make-test-python.nix ({...}: {
-
nodes = {
-
namenode = {pkgs, ...}: {
+
import ../make-test-python.nix ({ package, lib, ... }:
+
with lib;
+
{
+
name = "hadoop-hdfs";
+
+
nodes = let
+
coreSite = {
+
"fs.defaultFS" = "hdfs://namenode:8020";
+
"hadoop.proxyuser.httpfs.groups" = "*";
+
"hadoop.proxyuser.httpfs.hosts" = "*";
+
};
+
in {
+
namenode = { pkgs, ... }: {
services.hadoop = {
-
package = pkgs.hadoop;
+
inherit package;
hdfs = {
namenode = {
enable = true;
+
openFirewall = true;
formatOnInit = true;
};
-
httpfs.enable = true;
+
httpfs = {
+
# The NixOS hadoop module only support webHDFS on 3.3 and newer
+
enable = mkIf (versionAtLeast package.version "3.3") true;
+
openFirewall = true;
+
};
};
-
coreSite = {
-
"fs.defaultFS" = "hdfs://namenode:8020";
-
"hadoop.proxyuser.httpfs.groups" = "*";
-
"hadoop.proxyuser.httpfs.hosts" = "*";
-
};
+
inherit coreSite;
};
};
-
datanode = {pkgs, ...}: {
+
datanode = { pkgs, ... }: {
services.hadoop = {
-
package = pkgs.hadoop;
-
hdfs.datanode.enable = true;
-
coreSite = {
-
"fs.defaultFS" = "hdfs://namenode:8020";
-
"hadoop.proxyuser.httpfs.groups" = "*";
-
"hadoop.proxyuser.httpfs.hosts" = "*";
+
inherit package;
+
hdfs.datanode = {
+
enable = true;
+
openFirewall = true;
+
dataDirs = [{
+
type = "DISK";
+
path = "/tmp/dn1";
+
}];
};
+
inherit coreSite;
};
};
};
···
namenode.wait_for_unit("hdfs-namenode")
namenode.wait_for_unit("network.target")
namenode.wait_for_open_port(8020)
+
namenode.succeed("ss -tulpne | systemd-cat")
+
namenode.succeed("cat /etc/hadoop*/hdfs-site.xml | systemd-cat")
namenode.wait_for_open_port(9870)
datanode.wait_for_unit("hdfs-datanode")
datanode.wait_for_unit("network.target")
+
'' + ( if versionAtLeast package.version "3" then ''
datanode.wait_for_open_port(9864)
datanode.wait_for_open_port(9866)
datanode.wait_for_open_port(9867)
+
datanode.succeed("curl -f http://datanode:9864")
+
'' else ''
+
datanode.wait_for_open_port(50075)
+
datanode.wait_for_open_port(50010)
+
datanode.wait_for_open_port(50020)
+
+
datanode.succeed("curl -f http://datanode:50075")
+
'' ) + ''
namenode.succeed("curl -f http://namenode:9870")
-
datanode.succeed("curl -f http://datanode:9864")
datanode.succeed("sudo -u hdfs hdfs dfsadmin -safemode wait")
datanode.succeed("echo testfilecontents | sudo -u hdfs hdfs dfs -put - /testfile")
assert "testfilecontents" in datanode.succeed("sudo -u hdfs hdfs dfs -cat /testfile")
+
'' + optionalString ( versionAtLeast package.version "3.3" ) ''
namenode.wait_for_unit("hdfs-httpfs")
namenode.wait_for_open_port(14000)
assert "testfilecontents" in datanode.succeed("curl -f \"http://namenode:14000/webhdfs/v1/testfile?user.name=hdfs&op=OPEN\" 2>&1")
+21 -13
nixos/tests/hadoop/yarn.nix
···
# This only tests if YARN is able to start its services
-
import ../make-test-python.nix ({...}: {
+
import ../make-test-python.nix ({ package, ... }: {
+
name = "hadoop-yarn";
+
nodes = {
-
resourcemanager = {pkgs, ...}: {
-
services.hadoop.package = pkgs.hadoop;
-
services.hadoop.yarn.resourcemanager.enable = true;
-
services.hadoop.yarnSite = {
-
"yarn.resourcemanager.scheduler.class" = "org.apache.hadoop.yarn.server.resourcemanager.scheduler.fifo.FifoScheduler";
+
resourcemanager = { ... }: {
+
services.hadoop = {
+
inherit package;
+
yarn.resourcemanager = {
+
enable = true;
+
openFirewall = true;
+
};
};
};
-
nodemanager = {pkgs, ...}: {
-
services.hadoop.package = pkgs.hadoop;
-
services.hadoop.yarn.nodemanager.enable = true;
-
services.hadoop.yarnSite = {
-
"yarn.resourcemanager.hostname" = "resourcemanager";
-
"yarn.nodemanager.log-dirs" = "/tmp/userlogs";
+
nodemanager = { options, lib, ... }: {
+
services.hadoop = {
+
inherit package;
+
yarn.nodemanager = {
+
enable = true;
+
openFirewall = true;
+
};
+
yarnSite = options.services.hadoop.yarnSite.default // {
+
"yarn.resourcemanager.hostname" = "resourcemanager";
+
"yarn.nodemanager.log-dirs" = "/tmp/userlogs";
+
};
};
};
-
};
testScript = ''
+1 -1
nixos/tests/nextcloud/default.nix
···
};
})
{ }
-
[ 21 22 23 ]
+
[ 22 23 ]
+11 -2
pkgs/applications/audio/mopidy/ytmusic.nix
···
python3Packages.buildPythonApplication rec {
pname = "mopidy-ytmusic";
-
version = "0.3.2";
+
version = "0.3.5";
src = python3Packages.fetchPypi {
inherit version;
pname = "Mopidy-YTMusic";
-
sha256 = "sha256-BZtW+qHsTnOMj+jdAFI8ZMwGxJc9lNosgPJZGbt4JgU=";
+
sha256 = "0pncyxfqxvznb9y4ksndbny1yf5mxh4089ak0yz86dp2qi5j99iv";
};
+
postPatch = ''
+
substituteInPlace setup.py \
+
--replace 'ytmusicapi>=0.20.0,<0.21.0' 'ytmusicapi>=0.20.0'
+
'';
+
propagatedBuildInputs = [
mopidy
python3Packages.ytmusicapi
python3Packages.pytube
];
+
pythonImportsCheck = [ "mopidy_ytmusic" ];
+
+
# has no tests
doCheck = false;
meta = with lib; {
description = "Mopidy extension for playing music from YouTube Music";
+
homepage = "https://github.com/OzymandiasTheGreat/mopidy-ytmusic";
license = licenses.asl20;
maintainers = [ maintainers.nickhu ];
};
+83
pkgs/applications/editors/lapce/default.nix
···
+
{ lib
+
, stdenv
+
, fetchFromGitHub
+
, rustPlatform
+
, cmake
+
, pkg-config
+
, python3
+
, perl
+
, freetype
+
, fontconfig
+
, libxkbcommon
+
, xcbutil
+
, libX11
+
, libXcursor
+
, libXrandr
+
, libXi
+
, vulkan-loader
+
, copyDesktopItems
+
, makeDesktopItem
+
}:
+
+
rustPlatform.buildRustPackage rec {
+
pname = "lapce";
+
version = "0.0.10";
+
+
src = fetchFromGitHub {
+
owner = "lapce";
+
repo = pname;
+
rev = "v${version}";
+
sha256 = "tOVFm4DFQurFU4DtpPwxXQLbTGCZnrV1FfYKtvkRxRE=";
+
};
+
+
cargoPatches = [ ./fix-version.patch ];
+
+
cargoSha256 = "BwB3KgmI5XnZ5uHv6f+kGKBzpyxPWcoKvF7qw90eorI=";
+
+
nativeBuildInputs = [
+
cmake
+
pkg-config
+
python3
+
perl
+
copyDesktopItems
+
];
+
+
buildInputs = [
+
freetype
+
fontconfig
+
libxkbcommon
+
xcbutil
+
libX11
+
libXcursor
+
libXrandr
+
libXi
+
vulkan-loader
+
];
+
+
# Add missing vulkan dependency to rpath
+
preFixup = ''
+
patchelf --add-needed ${vulkan-loader}/lib/libvulkan.so.1 $out/bin/lapce
+
'';
+
+
postInstall = ''
+
install -Dm0644 $src/extra/images/logo.svg $out/share/icons/hicolor/scalable/apps/lapce.svg
+
'';
+
+
desktopItems = [ (makeDesktopItem {
+
name = "lapce";
+
exec = "lapce %F";
+
icon = "lapce";
+
desktopName = "Lapce";
+
comment = meta.description;
+
genericName = "Code Editor";
+
categories = [ "Development" "Utility" "TextEditor" ];
+
}) ];
+
+
meta = with lib; {
+
description = "Lightning-fast and Powerful Code Editor written in Rust";
+
homepage = "https://github.com/lapce/lapce";
+
license = with licenses; [ asl20 ];
+
maintainers = with maintainers; [ elliot ];
+
broken = stdenv.isDarwin;
+
};
+
}
+31
pkgs/applications/editors/lapce/fix-version.patch
···
+
diff --git a/Cargo.lock b/Cargo.lock
+
index bc9a0f8..45a74ad 100644
+
--- a/Cargo.lock
+
+++ b/Cargo.lock
+
@@ -2165,7 +2165,7 @@ dependencies = [
+
+
[[package]]
+
name = "lapce"
+
-version = "0.0.9"
+
+version = "0.0.10"
+
dependencies = [
+
"lapce-core",
+
"lapce-proxy",
+
@@ -2173,7 +2173,7 @@ dependencies = [
+
+
[[package]]
+
name = "lapce-core"
+
-version = "0.0.9"
+
+version = "0.0.10"
+
dependencies = [
+
"Inflector",
+
"alacritty_terminal 0.15.0",
+
@@ -2233,7 +2233,7 @@ dependencies = [
+
+
[[package]]
+
name = "lapce-proxy"
+
-version = "0.0.9"
+
+version = "0.0.10"
+
dependencies = [
+
"alacritty_terminal 0.16.0-rc2",
+
"anyhow",
+24
pkgs/applications/editors/vscode/extensions/default.nix
···
};
};
+
marp-team.marp-vscode = buildVscodeMarketplaceExtension {
+
mktplcRef = {
+
name = "marp-vscode";
+
publisher = "marp-team";
+
version = "1.5.0";
+
sha256 = "0wqsj8rp58vl3nafkjvyw394h5j4jd7d24ra6hkvfpnlzrgv4yhs";
+
};
+
meta = {
+
license = lib.licenses.mit;
+
};
+
};
+
mikestead.dotenv = buildVscodeMarketplaceExtension {
mktplcRef = {
name = "dotenv";
···
};
meta = {
license = lib.licenses.mit;
+
};
+
};
+
+
richie5um2.snake-trail = buildVscodeMarketplaceExtension {
+
mktplcRef = {
+
name = "snake-trail";
+
publisher = "richie5um2";
+
version = "0.6.0";
+
sha256 = "0wkpq9f48hplrgabb0v1ij6fc4sb8h4a93dagw4biprhnnm3qx49";
+
};
+
meta = with lib; {
+
license = licenses.mit;
};
};
+4 -4
pkgs/applications/misc/kratos/default.nix
···
buildGoModule rec {
pname = "kratos";
-
version = "0.8.0-alpha.3";
+
version = "0.8.3-alpha.1.pre.0";
src = fetchFromGitHub {
owner = "ory";
repo = "kratos";
rev = "v${version}";
-
sha256 = "0ihq2kxjackicxg0hrpmx6bsgz056xbaq3j8py37z2w6mwszarcg";
+
sha256 = "1225paf0x6lb6cb3q5f4lyz0r426ifx4x8145q7nsc6v64srck2y";
};
-
vendorSha256 = "175pckj30cm5xkbvsdvwzarvwapsylyjgj4ss8v5r1sa0fjpj008";
+
vendorSha256 = "10zhxbccjsp6hbmk2lnvbag6c92hz703mcaigaj4wvlf7glpldm6";
subPackages = [ "." ];
···
test/e2e/run.sh
script/testenv.sh
script/test-envs.sh
-
persistence/sql/migratest/update_fixtures.sh
+
script/debug-entrypoint.sh
)
patchShebangs "''${files[@]}"
+2 -2
pkgs/applications/misc/mob/default.nix
···
{ lib
, buildGoPackage
, fetchFromGitHub
-
-
, withSpeech ? true
+
, stdenv
+
, withSpeech ? !stdenv.isDarwin
, makeWrapper
, espeak-ng
}:
+3 -3
pkgs/applications/misc/stork/default.nix
···
rustPlatform.buildRustPackage rec {
pname = "stork";
-
version = "1.4.0";
+
version = "1.4.1";
src = fetchFromGitHub {
owner = "jameslittle230";
repo = "stork";
rev = "v${version}";
-
sha256 = "sha256-9fylJcUuModemkBRnXeFfB1b+CD9IvTxW+CnlqaUb60=";
+
sha256 = "sha256-aBsxRLUufVUauySCxZKk/ZfcU/5KR7jOHmnx6mHmsFs=";
};
-
cargoSha256 = "sha256-j7OXl66xuTuP6hWJs+xHrwtaBGAYt02OESCN6FH3KX0=";
+
cargoSha256 = "sha256-oNoWGdXYfp47IpqU1twbORPOYrHjArNf43Zyeyat4Xs=";
nativeBuildInputs = [ pkg-config ];
+2 -2
pkgs/applications/networking/browsers/brave/default.nix
···
stdenv.mkDerivation rec {
pname = "brave";
-
version = "1.36.111";
+
version = "1.36.116";
src = fetchurl {
url = "https://github.com/brave/brave-browser/releases/download/v${version}/brave-browser_${version}_amd64.deb";
-
sha256 = "bXZsUqLaP43wJV3Cehgblw1G179HgGhToSL36v5QseA=";
+
sha256 = "whGV0VgCm6JSyrcFQTKbM35b/qLQdBmChTrYuyC+OlI=";
};
dontConfigure = true;
+7 -7
pkgs/applications/networking/browsers/chromium/upstream-info.json
···
{
"stable": {
-
"version": "99.0.4844.51",
-
"sha256": "1qxsn8zvvvsnn0k7nn606rhaial8ikrlfh175msqpp50xibjxicp",
-
"sha256bin64": "04kqfppa88g2q54vp53avyyhqzrxljz49p4wqk76kq7fz2rm94x1",
+
"version": "99.0.4844.74",
+
"sha256": "165vzxv3xi4r9ia3qnqsr4p9ai0344w1pnq03c6jdq7x613lcprd",
+
"sha256bin64": "1xzr7qv4rcardl3apr8w22dn81lzqkklhp26qqlbdcylacqqji04",
"deps": {
"gn": {
"version": "2022-01-10",
···
}
},
"chromedriver": {
-
"version": "99.0.4844.35",
-
"sha256_linux": "1q10mn34s03zy0nqcgrjd7ry53g4paxpwcki1bgicpcrwnjlzc3y",
-
"sha256_darwin": "0mcfry8vqqc8n1sgyn2azr8pc4lgjnkpnhz0ggjqm12njq0lfjfx",
-
"sha256_darwin_aarch64": "19wpqd5mq2vrgma899vbbdqhg660x47v4ppbz1r8dcg5r5y93x3s"
+
"version": "99.0.4844.51",
+
"sha256_linux": "1r5wbcfbj9s216jyjasmiscsrsix9ap3pplp12rznrwn4898p51y",
+
"sha256_darwin": "1nak8p5hdrw94lx73m9c110zrwag4qr6487dhplm3qfrnrkdh8wp",
+
"sha256_darwin_aarch64": "0hkcx6a8bcjlbmp6z3ld23mi1kpyjn2g7m3ns9qw6ns4x3rn5i3r"
}
},
"beta": {
+4 -4
pkgs/applications/networking/cluster/fluxcd/default.nix
···
{ lib, buildGoModule, fetchFromGitHub, fetchzip, installShellFiles }:
let
-
version = "0.27.3";
-
sha256 = "08ax1033456hfm5qz0r671xm5ig0047nqp7xffyn9za498bm4i5q";
-
manifestsSha256 = "165kspq10nvlihcb1460qmbw5r1mlzs5gliw01qa4mymvzmlggk7";
+
version = "0.27.4";
+
sha256 = "06951i332gr17nsbns8mh4kcjilqfw5w95shaznpaksx93f554g0";
+
manifestsSha256 = "0fvzh7j3vi5hw8jbw2gisjnn53bffwnp7zm3dwcbv3svwpw7823d";
manifests = fetchzip {
url =
···
inherit sha256;
};
-
vendorSha256 = "sha256-ENSfec7iSKOkILgVCVnORpAia4D+vBjQAUXDA7EIvVQ=";
+
vendorSha256 = "sha256-7sHLXjyYMWSFckDPeVGJYK+nwhbRpD76tV334PCVYwA=";
postUnpack = ''
cp -r ${manifests} source/cmd/flux/manifests
+39 -26
pkgs/applications/networking/cluster/hadoop/default.nix
···
, zlib
, zstd
, openssl
+
, glibc
+
, nixosTests
}:
with lib;
···
assert elem stdenv.system [ "x86_64-linux" "x86_64-darwin" "aarch64-linux" "aarch64-darwin" ];
let
-
common = { pname, version, untarDir ? "${pname}-${version}", sha256, jdk, openssl ? null, nativeLibs ? [ ], libPatches ? "" }:
+
common = { pname, version, untarDir ? "${pname}-${version}", sha256, jdk, openssl ? null, nativeLibs ? [ ], libPatches ? "", tests }:
stdenv.mkDerivation rec {
inherit pname version jdk libPatches untarDir openssl;
src = fetchurl {
···
installPhase = ''
mkdir -p $out/{lib/${untarDir}/conf,bin,lib}
mv * $out/lib/${untarDir}
-
+
'' + optionalString stdenv.isLinux ''
+
# All versions need container-executor, but some versions can't use autoPatchelf because of broken SSL versions
+
patchelf --set-interpreter ${glibc.out}/lib64/ld-linux-x86-64.so.2 $out/lib/${untarDir}/bin/container-executor
+
'' + ''
for n in $(find $out/lib/${untarDir}/bin -type f ! -name "*.*"); do
makeWrapper "$n" "$out/bin/$(basename $n)"\
--set-default JAVA_HOME ${jdk.home}\
···
done
'' + libPatches;
+
passthru = { inherit tests; };
+
meta = {
homepage = "https://hadoop.apache.org/";
description = "Framework for distributed processing of large data sets across clusters of computers";
···
{
# Different version of hadoop support different java runtime versions
# https://cwiki.apache.org/confluence/display/HADOOP/Hadoop+Java+Versions
-
hadoop_3_3 =
-
common
-
(rec {
-
pname = "hadoop";
-
version = "3.3.1";
-
untarDir = "${pname}-${version}";
-
sha256 = rec {
-
x86_64-linux = "1b3v16ihysqaxw8za1r5jlnphy8dwhivdx2d0z64309w57ihlxxd";
-
x86_64-darwin = x86_64-linux;
-
aarch64-linux = "00ln18vpi07jq2slk3kplyhcj8ad41n0yl880q5cihilk7daclxz";
-
aarch64-darwin = aarch64-linux;
-
};
-
-
inherit openssl;
-
nativeLibs = [ stdenv.cc.cc.lib protobuf3_7 zlib snappy ];
-
libPatches = ''
-
ln -s ${getLib cyrus_sasl}/lib/libsasl2.so $out/lib/${untarDir}/lib/native/libsasl2.so.2
-
ln -s ${getLib openssl}/lib/libcrypto.so $out/lib/${untarDir}/lib/native/
-
ln -s ${getLib zlib}/lib/libz.so.1 $out/lib/${untarDir}/lib/native/
-
ln -s ${getLib zstd}/lib/libzstd.so.1 $out/lib/${untarDir}/lib/native/
-
ln -s ${getLib bzip2}/lib/libbz2.so.1 $out/lib/${untarDir}/lib/native/
-
'' + optionalString stdenv.isLinux "patchelf --add-rpath ${jdk.home}/lib/server $out/lib/${untarDir}/lib/native/libnativetask.so.1.0.0";
-
jdk = jdk11_headless;
-
});
+
hadoop_3_3 = common rec {
+
pname = "hadoop";
+
version = "3.3.1";
+
untarDir = "${pname}-${version}";
+
sha256 = rec {
+
x86_64-linux = "1b3v16ihysqaxw8za1r5jlnphy8dwhivdx2d0z64309w57ihlxxd";
+
x86_64-darwin = x86_64-linux;
+
aarch64-linux = "00ln18vpi07jq2slk3kplyhcj8ad41n0yl880q5cihilk7daclxz";
+
aarch64-darwin = aarch64-linux;
+
};
+
jdk = jdk11_headless;
+
inherit openssl;
+
# TODO: Package and add Intel Storage Acceleration Library
+
nativeLibs = [ stdenv.cc.cc.lib protobuf3_7 zlib snappy ];
+
libPatches = ''
+
ln -s ${getLib cyrus_sasl}/lib/libsasl2.so $out/lib/${untarDir}/lib/native/libsasl2.so.2
+
ln -s ${getLib openssl}/lib/libcrypto.so $out/lib/${untarDir}/lib/native/
+
ln -s ${getLib zlib}/lib/libz.so.1 $out/lib/${untarDir}/lib/native/
+
ln -s ${getLib zstd}/lib/libzstd.so.1 $out/lib/${untarDir}/lib/native/
+
ln -s ${getLib bzip2}/lib/libbz2.so.1 $out/lib/${untarDir}/lib/native/
+
'' + optionalString stdenv.isLinux ''
+
# libjvm.so for Java >=11
+
patchelf --add-rpath ${jdk.home}/lib/server $out/lib/${untarDir}/lib/native/libnativetask.so.1.0.0
+
# Java 8 has libjvm.so at a different path
+
patchelf --add-rpath ${jdk.home}/jre/lib/amd64/server $out/lib/${untarDir}/lib/native/libnativetask.so.1.0.0
+
'';
+
tests = nixosTests.hadoop;
+
};
hadoop_3_2 = common rec {
pname = "hadoop";
version = "3.2.2";
···
jdk = jdk8_headless;
# not using native libs because of broken openssl_1_0_2 dependency
# can be manually overriden
+
tests = nixosTests.hadoop_3_2;
};
hadoop2 = common rec {
pname = "hadoop";
version = "2.10.1";
sha256.x86_64-linux = "1w31x4bk9f2swnx8qxx0cgwfg8vbpm6cy5lvfnbbpl3rsjhmyg97";
jdk = jdk8_headless;
+
tests = nixosTests.hadoop2;
};
}
+3 -3
pkgs/applications/networking/cluster/qbec/default.nix
···
buildGoModule rec {
pname = "qbec";
-
version = "0.15.1";
+
version = "0.15.2";
src = fetchFromGitHub {
owner = "splunk";
repo = "qbec";
rev = "v${version}";
-
sha256 = "sha256-cXU+LnOCsGg+iwH5c7cKVi2Htw45AGxyjJFKXKbTkUo=";
+
sha256 = "sha256-js/UjnNYRW7s3b4TeprhmBe4cDLDYDrMeLtpASI9aN4=";
};
-
vendorSha256 = "sha256-CiVAzFN/ygIiyhZKYtJ197TZO3ppL/emWSj4hAlIanc=";
+
vendorSha256 = "sha256-oEbKk9cMbI0ZWXrfM8Y19OF/A75mwHl0C/PJx0oTOBo=";
doCheck = false;
+33 -22
pkgs/applications/networking/cluster/spark/default.nix
···
-
{ lib, stdenv, fetchzip, makeWrapper, jdk8, python3Packages, extraPythonPackages ? [], coreutils, hadoop
-
, RSupport? true, R
+
{ lib
+
, stdenv
+
, fetchzip
+
, makeWrapper
+
, jdk8
+
, python3Packages
+
, extraPythonPackages ? [ ]
+
, coreutils
+
, hadoop
+
, RSupport ? true
+
, R
}:
with lib;
let
-
spark = { pname, version, src }:
+
spark = { pname, version, sha256 }:
stdenv.mkDerivation rec {
-
inherit pname version src;
+
inherit pname version;
+
src = fetchzip {
+
url = "mirror://apache/spark/${pname}-${version}/${pname}-${version}-bin-without-hadoop.tgz";
+
sha256 = sha256;
+
};
nativeBuildInputs = [ makeWrapper ];
buildInputs = [ jdk8 python3Packages.python ]
++ extraPythonPackages
···
'';
meta = {
-
description = "Apache Spark is a fast and general engine for large-scale data processing";
-
homepage = "https://spark.apache.org/";
-
license = lib.licenses.asl20;
-
platforms = lib.platforms.all;
-
maintainers = with maintainers; [ thoughtpolice offline kamilchm illustris ];
+
description = "Apache Spark is a fast and general engine for large-scale data processing";
+
homepage = "https://spark.apache.org/";
+
license = lib.licenses.asl20;
+
platforms = lib.platforms.all;
+
maintainers = with maintainers; [ thoughtpolice offline kamilchm illustris ];
repositories.git = "git://git.apache.org/spark.git";
};
};
-
in {
-
spark3 = spark rec {
+
in
+
{
+
spark_3_2 = spark rec {
+
pname = "spark";
+
version = "3.2.1";
+
sha256 = "0kxdqczwmj6pray0h8h1qhygni9m82jzznw5fbv9hrxrkq1v182d";
+
};
+
spark_3_1 = spark rec {
pname = "spark";
version = "3.1.2";
-
-
src = fetchzip {
-
url = "mirror://apache/spark/${pname}-${version}/${pname}-${version}-bin-without-hadoop.tgz";
-
sha256 = "1bgh2y6jm7wqy6yc40rx68xkki31i3jiri2yixb1bm0i9pvsj9yf";
-
};
+
sha256 = "1bgh2y6jm7wqy6yc40rx68xkki31i3jiri2yixb1bm0i9pvsj9yf";
};
-
spark2 = spark rec {
+
spark_2_4 = spark rec {
pname = "spark";
version = "2.4.8";
-
-
src = fetchzip {
-
url = "mirror://apache/spark/${pname}-${version}/${pname}-${version}-bin-without-hadoop.tgz";
-
sha256 = "1mkyq0gz9fiav25vr0dba5ivp0wh0mh7kswwnx8pvsmb6wbwyfxv";
-
};
+
sha256 = "1mkyq0gz9fiav25vr0dba5ivp0wh0mh7kswwnx8pvsmb6wbwyfxv";
};
}
+2 -2
pkgs/applications/networking/instant-messengers/cinny/default.nix
···
configOverrides = writeText "cinny-config-overrides.json" (builtins.toJSON conf);
in stdenv.mkDerivation rec {
pname = "cinny";
-
version = "1.8.0";
+
version = "1.8.1";
src = fetchurl {
url = "https://github.com/ajbura/cinny/releases/download/v${version}/cinny-v${version}.tar.gz";
-
sha256 = "0pbapzl3pfx87ns4vp7088kkhl34c0ihbq90r3d0iz6sa16mcs79";
+
sha256 = "13jd7hihkw3nlcj0m157z6qix61v6zjs52h5zmw2agm47qmv0w6z";
};
installPhase = ''
+7 -5
pkgs/applications/networking/mailreaders/himalaya/default.nix
···
rustPlatform.buildRustPackage rec {
pname = "himalaya";
-
version = "0.5.8";
+
version = "0.5.9";
src = fetchFromGitHub {
owner = "soywod";
repo = pname;
rev = "v${version}";
-
sha256 = "sha256-Ejaspj0YpkGmfO1omOhx8ZDg77J7NqC32mw5Cd3K1FM=";
+
sha256 = "sha256-g+ySsHnJ4FpmJLEjlutuiJmMkKI3Jb+HkWi1WBIo1aw=";
};
-
cargoSha256 = "sha256-xce2iHrqTxIirrut4dN7526pjE4T+ruaDS44jr+KeGs=";
+
cargoSha256 = "sha256-NkkONl57zSilElVAOXUBxWnims4+EIVkkTdExbeBAaQ=";
nativeBuildInputs = lib.optionals enableCompletions [ installShellFiles ]
++ lib.optionals (!stdenv.hostPlatform.isDarwin) [ pkg-config ];
···
openssl
];
+
# flag added because without end-to-end testing is ran which requires
+
# additional tooling and servers to test
cargoTestFlags = [ "--lib" ];
postInstall = lib.optionalString enableCompletions ''
···
'';
meta = with lib; {
-
description = "CLI email client written in Rust";
+
description = "Command-line interface for email management";
homepage = "https://github.com/soywod/himalaya";
changelog = "https://github.com/soywod/himalaya/blob/v${version}/CHANGELOG.md";
license = licenses.bsdOriginal;
-
maintainers = with maintainers; [ yanganto ];
+
maintainers = with maintainers; [ toastal yanganto ];
};
}
+2 -2
pkgs/applications/science/computer-architecture/qtrvsim/default.nix
···
stdenv.mkDerivation rec {
pname = "QtRVSim";
-
version = "0.9.1";
+
version = "0.9.2";
src = fetchFromGitHub {
owner = "cvut";
repo = "qtrvsim";
rev = "refs/tags/v${version}";
-
sha256 = "AOksVS0drIBnK4RCxZw40yVxf4E8GjG9kU0rIZsY9gA=";
+
sha256 = "B1l+ysrodeDbxYfdLLMF8yk4/uPXTcDrTaMtYm89HuU=";
};
nativeBuildInputs = [ cmake wrapQtAppsHook ];
+2 -2
pkgs/applications/video/freetube/default.nix
···
installPhase = ''
runHook preInstall
-
mkdir -p $out/bin $out/share/${pname} $out/share/applications
+
mkdir -p $out/bin $out/share/${pname} $out/share/applications $out/share/icons/hicolor/scalable/apps
cp -a ${appimageContents}/{locales,resources} $out/share/${pname}
cp -a ${appimageContents}/freetube.desktop $out/share/applications/${pname}.desktop
-
cp -a ${appimageContents}/usr/share/icons $out/share
+
cp -a ${appimageContents}/usr/share/icons/hicolor/scalable/freetube.svg $out/share/icons/hicolor/scalable/apps
substituteInPlace $out/share/applications/${pname}.desktop \
--replace 'Exec=AppRun' 'Exec=${pname}'
+5 -3
pkgs/applications/video/smplayer/default.nix
···
stdenv.mkDerivation rec {
pname = "smplayer";
-
version = "21.10.0";
+
version = "22.2.0";
src = fetchFromGitHub {
owner = "smplayer-dev";
repo = pname;
rev = "v${version}";
-
hash = "sha256-p6036c8KX3GCINmkjHZlDLgHhLKri+t2WNWzP4KsSI8=";
+
hash = "sha256-7DMvIqW3vzjVzJPyjbXuHHcf1T6EFcf/a/mVYqa3XS8=";
};
nativeBuildInputs = [
···
wrapQtAppsHook
];
-
buildInputs = [ qtscript ];
+
buildInputs = [
+
qtscript
+
];
dontUseQmakeConfigure = true;
+26
pkgs/data/fonts/borg-sans-mono/default.nix
···
+
{ lib, fetchzip }:
+
+
let
+
pname = "borg-sans-mono";
+
version = "0.2.0";
+
in
+
fetchzip {
+
name = "${pname}-${version}";
+
+
# https://github.com/marnen/borg-sans-mono/issues/19
+
url = "https://github.com/marnen/borg-sans-mono/files/107663/BorgSansMono.ttf.zip";
+
sha256 = "1gz4ab0smw76ih5cs2l3n92c77nv7ld5zghq42avjsfhxrc2n5ri";
+
+
postFetch = ''
+
mkdir -p $out/share/fonts
+
unzip -j $downloadedFile \*.ttf -d $out/share/fonts/truetype
+
'';
+
+
meta = with lib; {
+
description = "Droid Sans Mono Slashed + Hasklig-style ligatures";
+
homepage = "https://github.com/marnen/borg-sans-mono";
+
license = licenses.asl20;
+
platforms = platforms.all;
+
maintainers = with maintainers; [ atila ];
+
};
+
}
+2 -2
pkgs/data/fonts/vazir-fonts/default.nix
···
let
pname = "vazir-fonts";
-
version = "30.1.0";
+
version = "32.0.0";
in fetchFromGitHub {
name = "${pname}-${version}";
···
tar xf $downloadedFile --strip=1
find . -name '*.ttf' -exec install -m444 -Dt $out/share/fonts/truetype {} \;
'';
-
sha256 = "sha256-J1l6rBFgaXFtGnK0pH7GbaYTt5TI/OevjZrXmaEgkB4=";
+
sha256 = "sha256-Uy8hgBtCcTLwXu9FkLN1WavUfP74Jf53ChxVGS3UBVM=";
meta = with lib; {
homepage = "https://github.com/rastikerdar/vazir-font";
+2 -2
pkgs/data/icons/kora-icon-theme/default.nix
···
stdenv.mkDerivation rec {
pname = "kora-icon-theme";
-
version = "1.5.0";
+
version = "1.5.1";
src = fetchFromGitHub {
owner = "bikass";
repo = "kora";
rev = "v${version}";
-
sha256 = "sha256-kUgNj7KuxsQ/BvQ0ORl3xzEm9gv69+2PS0Bgv8i/S9U=";
+
sha256 = "sha256-3TKjd2Lblb+/zFq7rkdgnD1dJU3kis7QZi7Ui74IWzA=";
};
nativeBuildInputs = [
+9 -1
pkgs/development/compilers/crystal/default.nix
···
i686-linux = "linux-i686";
x86_64-darwin = "darwin-x86_64";
aarch64-darwin = "darwin-universal";
+
aarch64-linux = "linux-aarch64";
};
arch = archs.${stdenv.system} or (throw "system ${stdenv.system} not supported");
···
checkInputs = [ git gmp openssl readline libxml2 libyaml ];
+
binaryUrl = version: rel:
+
if arch == archs.aarch64-linux then
+
"https://dev.alpinelinux.org/archive/crystal/crystal-${version}-aarch64-alpine-linux-musl.tar.gz"
+
else
+
"https://github.com/crystal-lang/crystal/releases/download/${version}/crystal-${version}-${toString rel}-${arch}.tar.gz";
+
genericBinary = { version, sha256s, rel ? 1 }:
stdenv.mkDerivation rec {
pname = "crystal-binary";
inherit version;
src = fetchurl {
-
url = "https://github.com/crystal-lang/crystal/releases/download/${version}/crystal-${version}-${toString rel}-${arch}.tar.gz";
+
url = binaryUrl version rel;
sha256 = sha256s.${stdenv.system};
};
···
x86_64-linux = "1949argajiyqyq09824yj3wjyv88gd8wbf20xh895saqfykiq880";
i686-linux = "0w0f4fwr2ijhx59i7ppicbh05hfmq7vffmgl7lal6im945m29vch";
x86_64-darwin = "01n0rf8zh551vv8wq3h0ifnsai0fz9a77yq87xx81y9dscl9h099";
+
aarch64-linux = "0sns7l4q3z82qi3dc2r4p63f4s8hvifqzgq56ykwyrvawynjhd53";
};
};
+3 -3
pkgs/development/compilers/open-watcom/v2.nix
···
stdenv.mkDerivation rec {
pname = "open-watcom-v2";
-
version = "unstable-2022-02-22";
+
version = "unstable-2022-03-14";
name = "${pname}-unwrapped-${version}";
src = fetchFromGitHub {
owner = "open-watcom";
repo = "open-watcom-v2";
-
rev = "9e25b3d6b8066f09b4f7131a31de1cf2af691e9a";
-
sha256 = "1w336070kmhc6cmn2aqr8vm0fmw3yza2n0w4asvs2kqxjgmbn6i2";
+
rev = "22627ccc1bd3de70aff9ac056e0dc9ecf7f7b6ec";
+
sha256 = "khy/fhmQjTGKfx6iOUBt+ySwpEx0df/7meyNvBnJAPY=";
};
postPatch = ''
+3 -3
pkgs/development/dotnet-modules/python-language-server/default.nix
···
buildDotnetModule rec {
pname = "python-language-server";
-
version = "2021-09-08";
+
version = "2022-02-18";
src = fetchFromGitHub {
owner = "microsoft";
repo = "python-language-server";
-
rev = "26ea18997f45f7d7bc5a3c5a9efc723a8dbb02fa";
-
sha256 = "1m8pf9k20wy4fzv27v3bswvc8s01ag6ka2qm9nn6bgq0s0lq78mh";
+
rev = "52c1afd34b5acb0b44597bb8681232876fe94084";
+
sha256 = "05s8mwi3dqzjghgpr1mfs1b7cgrq818bbj1v7aly6axc8c2n4gny";
};
projectFile = "src/LanguageServer/Impl/Microsoft.Python.LanguageServer.csproj";
-2
pkgs/development/haskell-modules/non-hackage-packages.nix
···
nix-linter = self.callPackage ../../development/tools/analysis/nix-linter { };
-
nix-output-monitor = self.callPackage ../../tools/nix/nix-output-monitor { };
-
# hasura graphql-engine is not released to hackage.
# https://github.com/hasura/graphql-engine/issues/7391
ci-info = self.callPackage ../misc/haskell/hasura/ci-info.nix {};
+2 -2
pkgs/development/interpreters/python/default.nix
···
major = "3";
minor = "11";
patch = "0";
-
suffix = "a4";
+
suffix = "a6";
};
-
sha256 = "sha256-Q3/nN2w2Pa+vNM6A8ERrQfyaQsDiqMflGdPwoLfPs+0=";
+
sha256 = "sha256-HFOi/3WHljPjDKwp0qpregEONVuV8L+axpG+zPX50So=";
inherit (darwin) configd;
inherit passthruFun;
};
-44
pkgs/development/libraries/clearsilver/default.nix
···
-
{ lib, stdenv, fetchurl, fetchpatch, python2 }:
-
-
stdenv.mkDerivation rec {
-
pname = "clearsilver";
-
version = "0.10.5";
-
-
src = fetchurl {
-
url = "http://www.clearsilver.net/downloads/clearsilver-${version}.tar.gz";
-
sha256 = "1046m1dpq3nkgxbis2dr2x7hynmy51n64465q78d7pdgvqwa178y";
-
};
-
-
PYTHON_SITE = "${placeholder "out"}/${python2.sitePackages}";
-
-
configureFlags = [
-
"--with-python=${python2.interpreter}"
-
"--disable-apache"
-
"--disable-perl"
-
"--disable-ruby"
-
"--disable-java"
-
"--disable-csharp"
-
];
-
-
preInstall = ''
-
mkdir -p $out
-
mkdir -p $out/${python2.sitePackages}
-
'';
-
-
patches = [
-
(fetchpatch {
-
url = "https://sources.debian.net/data/main/c/clearsilver/0.10.5-1.6/debian/patches/clang-gcc5.patch";
-
sha256 = "0d44v9jx0b6k8nvrhknd958i9rs59kdh73z0lb4f1mzi8if16c38";
-
})
-
(fetchpatch {
-
url = "https://sources.debian.net/data/main/c/clearsilver/0.10.5-1.6/debian/patches/CVE-2011-4357.diff";
-
sha256 = "1lfncavxdqckrz03gv97lcliygbpi9lnih944vmdbn9zw6fwcipi";
-
})
-
];
-
-
meta = with lib; {
-
description = "Fast, powerful, and language-neutral HTML template system";
-
homepage = "http://www.clearsilver.net/";
-
license = licenses.free;
-
};
-
}
+2 -2
pkgs/development/libraries/intel-gmmlib/default.nix
···
stdenv.mkDerivation rec {
pname = "intel-gmmlib";
-
version = "22.0.3";
+
version = "22.1.0";
src = fetchFromGitHub {
owner = "intel";
repo = "gmmlib";
rev = "intel-gmmlib-${version}";
-
sha256 = "sha256-cXolz4hKLSTs8K9tCxaKnC2Pr0lQ0M+pPeF2w6bOAR8=";
+
sha256 = "sha256-4LFBokMEhhobKIMzZYlt3Nn88lX60l+IZZ0gi+o7Tds=";
};
nativeBuildInputs = [ cmake ];
+5 -4
pkgs/development/libraries/libarchive-qt/default.nix
···
-
{ mkDerivation, lib, fetchFromGitLab, libarchive, xz, zlib, bzip2, cmake, ninja }:
+
{ mkDerivation, lib, fetchFromGitLab, libarchive, xz, zlib, bzip2, meson, pkg-config, ninja }:
mkDerivation rec {
pname = "libarchive-qt";
-
version = "2.0.6";
+
version = "2.0.7";
src = fetchFromGitLab {
owner = "marcusbritanicus";
repo = pname;
rev = "v${version}";
-
sha256 = "sha256-Z+2zjQolV1Ncr6v9r7fGrc/fEMt0iMtGwv9eZ2Tu2cA=";
+
sha256 = "sha256-KRywB+Op44N00q9tgO2WNCliRgUDRvrCms1O8JYt62o=";
};
nativeBuildInputs = [
-
cmake
+
meson
ninja
+
pkg-config
];
buildInputs = [
+31
pkgs/development/libraries/libctl/default.nix
···
+
{ lib
+
, stdenv
+
, fetchFromGitHub
+
, autoreconfHook
+
, gfortran
+
, guile
+
, pkg-config
+
}:
+
+
stdenv.mkDerivation rec {
+
pname = "libctl";
+
version = "4.5.1";
+
+
src = fetchFromGitHub {
+
owner = "NanoComp";
+
repo = pname;
+
rev = "v${version}";
+
sha256 = "uOydBWYPXSBUi+4MM6FNx6B5l2to7Ny9Uc1MMTV9bGA=";
+
};
+
+
nativeBuildInputs = [ autoreconfHook gfortran guile pkg-config ];
+
+
configureFlags = [ "--enable-shared" ];
+
+
meta = with lib; {
+
description = "Guile-based library for supporting flexible control files in scientific simulations";
+
homepage = "https://github.com/NanoComp/libctl";
+
license = licenses.gpl2Only;
+
maintainers = with maintainers; [ carpinchomug ];
+
};
+
}
+5 -4
pkgs/development/libraries/libnbd/default.nix
···
, perl
, libxml2
, fuse
+
, fuse3
, gnutls
}:
stdenv.mkDerivation rec {
pname = "libnbd";
-
version = "1.9.5";
+
version = "1.12.2";
src = fetchurl {
-
url = "https://download.libguestfs.org/libnbd/${lib.versions.majorMinor version}-development/${pname}-${version}.tar.gz";
-
hash = "sha256-BnMoxIiuwhqcwVr3AwAIFgZPcFsIg55N66ZwWMTUnCw=";
+
url = "https://download.libguestfs.org/libnbd/${lib.versions.majorMinor version}-stable/${pname}-${version}.tar.gz";
+
hash = "sha256-57veJapt72LkP02wO4c1nDdHmnodqfT+rKPNDeTGQPM=";
};
nativeBuildInputs = [
···
buildInputs = [
fuse
+
fuse3
gnutls
libxml2
];
···
platforms = with platforms; linux;
};
}
-
# TODO: NBD URI support apparently is not enabled
# TODO: package the 1.6-stable version too
# TODO: git version needs ocaml
# TODO: bindings for go, ocaml and python
+2 -2
pkgs/development/libraries/opendht/default.nix
···
stdenv.mkDerivation rec {
pname = "opendht";
-
version = "2.3.2";
+
version = "2.3.5";
src = fetchFromGitHub {
owner = "savoirfairelinux";
repo = "opendht";
rev = version;
-
sha256 = "sha256-LevS9euBAFkI1ll79uqmVaRR/6FH6Z4cypHqvCIWxgU=";
+
sha256 = "sha256-GGaq8ziOCUDMxILq2QYUkSP4usBjbufbHwQF4Pr6hHw=";
};
nativeBuildInputs = [
+3 -3
pkgs/development/libraries/protolock/default.nix
···
buildGoModule rec {
pname = "protolock";
-
version = "0.15.2";
+
version = "0.16.0";
src = fetchFromGitHub {
owner = "nilslice";
repo = "protolock";
rev = "v${version}";
-
sha256 = "sha256-cKrG8f8cabuGDN1gmBYleXcBqeJksdREiEy63UK/6J0=";
+
sha256 = "sha256-vWwRZVArmlTIGwD4zV3dEHN2kkoeCZuNIvjCBVAviPo=";
};
-
vendorSha256 = "sha256-2XbBiiiPvZCnlKUzGDLFnxA34N/LmHoPbvRKZckmhx4=";
+
vendorSha256 = "sha256-kgSJUSjY8kgrGCNDPgw1WA8KwAqI5koJQ0IcE+tC5nk=";
doCheck = false;
+1 -1
pkgs/development/node-packages/default.nix
···
src = fetchurl {
url = "https://registry.npmjs.org/prisma/-/prisma-${version}.tgz";
-
sha512 = "sha512-dAld12vtwdz9Rz01nOjmnXe+vHana5PSog8t0XGgLemKsUVsaupYpr74AHaS3s78SaTS5s2HOghnJF+jn91ZrA==";
+
sha512 = "sha512-8SdsLPhKR3mOfoo2o73h9mNn3v5kA/RqGA26Sv6qDS78Eh2uepPqt5e8/nwj5EOblYm5HEGuitaXQrOCLb6uTw==";
};
postInstall = with pkgs; ''
wrapProgram "$out/bin/prisma" \
+2 -2
pkgs/development/python-modules/adafruit-platformdetect/default.nix
···
buildPythonPackage rec {
pname = "adafruit-platformdetect";
-
version = "3.21.0";
+
version = "3.21.1";
format = "setuptools";
src = fetchPypi {
pname = "Adafruit-PlatformDetect";
inherit version;
-
sha256 = "sha256-H65Ar/+9AwhKFNRK/SZyU8XzrMt3myjBo+YNJYtQ0b4=";
+
sha256 = "sha256-gVJUjxsl1rxvboL53186r63yp0k4FtTSgKJuqPzE2Q0=";
};
nativeBuildInputs = [
+2 -2
pkgs/development/python-modules/androidtv/default.nix
···
buildPythonPackage rec {
pname = "androidtv";
-
version = "0.0.64";
+
version = "0.0.65";
format = "setuptools";
disabled = pythonOlder "3.7";
···
owner = "JeffLIrion";
repo = "python-androidtv";
rev = "v${version}";
-
hash = "sha256-CJJ+mWAX9XG1/E2PljUZ8oz/la3hYXF1tMfuKt0Zvjw=";
+
hash = "sha256-bhXmPplRT9gzeD/GdD2HxN+Z4vvaiaxBwkqSml9SJUs=";
};
propagatedBuildInputs = [
+2 -2
pkgs/development/python-modules/awscrt/default.nix
···
buildPythonPackage rec {
pname = "awscrt";
-
version = "0.13.3";
+
version = "0.13.5";
format = "setuptools";
disabled = pythonOlder "3.6";
src = fetchPypi {
inherit pname version;
-
hash = "sha256-1GaKDpOGX/YbM4rByTw0nYgwHYFvOLHZ0GRvanX3vAU=";
+
hash = "sha256-dUNljMKsbl6eByhEYivWgRJczTBw3N1RVl8r3e898mg=";
};
buildInputs = lib.optionals stdenv.isDarwin [
+10 -8
pkgs/development/python-modules/azure-mgmt-monitor/default.nix
···
{ lib
, buildPythonPackage
, fetchPypi
-
, isPy3k
+
, pythonOlder
, msrest
, msrestazure
, azure-common
, azure-mgmt-core
-
, azure-mgmt-nspkg
}:
buildPythonPackage rec {
pname = "azure-mgmt-monitor";
-
version = "3.0.0";
+
version = "3.1.0";
+
format = "setuptools";
+
+
disabled = pythonOlder "3.6";
src = fetchPypi {
inherit pname version;
extension = "zip";
-
sha256 = "91ddb7333bf2b9541a53864cc8d2501e3694a03a9c0e41cbfae3348558675ce6";
+
hash = "sha256-ROcUAm0KgIjO2A2XBpS00IeEPgd8x4cjoMfn6X9C+Gw=";
};
propagatedBuildInputs = [
···
msrestazure
azure-common
azure-mgmt-core
-
] ++ lib.optionals (!isPy3k) [
-
azure-mgmt-nspkg
];
-
pythonNamespaces = [ "azure.mgmt" ];
+
pythonNamespaces = [
+
"azure.mgmt"
+
];
-
# has no tests
+
# Module has no tests
doCheck = false;
meta = with lib; {
+23 -10
pkgs/development/python-modules/bumps/default.nix
···
-
{ lib, buildPythonPackage, fetchPypi, six}:
+
{ lib
+
, buildPythonPackage
+
, fetchPypi
+
, pythonOlder
+
, six
+
}:
buildPythonPackage rec {
pname = "bumps";
-
version = "0.8.1";
+
version = "0.9.0";
+
format = "setuptools";
-
propagatedBuildInputs = [six];
-
-
# Bumps does not provide its own tests.py, so the test
-
# always fails
-
doCheck = false;
+
disabled = pythonOlder "3.7";
src = fetchPypi {
inherit pname version;
-
sha256 = "f4f2ee712a1e468a2ce5c0a32f67739a83331f0cb7b9c50b9e7510daefc12169";
+
hash = "sha256-BY9kg0ksKfrpQgsl1aDDJJ+zKJmURqwTtKxlITxse+o=";
};
+
propagatedBuildInputs = [
+
six
+
];
+
+
# Module has no tests
+
doCheck = false;
+
+
pythonImportsCheck = [
+
"bumps"
+
];
+
meta = with lib; {
-
homepage = "https://www.reflectometry.org/danse/software.html";
description = "Data fitting with bayesian uncertainty analysis";
-
maintainers = with maintainers; [ rprospero ];
+
homepage = "https://bumps.readthedocs.io/";
license = licenses.publicDomain;
+
maintainers = with maintainers; [ rprospero ];
};
}
+10 -6
pkgs/development/python-modules/cloudscraper/default.nix
···
{ lib
, buildPythonPackage
-
, isPy3k
+
, pythonOlder
, fetchPypi
, requests
, requests-toolbelt
···
buildPythonPackage rec {
pname = "cloudscraper";
-
version = "1.2.58";
-
disabled = !isPy3k;
+
version = "1.2.60";
+
format = "setuptools";
+
+
disabled = pythonOlder "3.7";
src = fetchPypi {
inherit pname version;
-
sha256 = "1wnzv2k8cm8q1x18r4zg8pcnpm4gsdp82hywwjimp2v2qll918nx";
+
hash = "sha256-DTQTsv/59895UTsMmqxYtSfFosUWPRx8wMT4zKHQ9Oc=";
};
propagatedBuildInputs = [
···
# nixpkgs yet, and also aren't included in the PyPI bundle. TODO.
doCheck = false;
-
pythonImportsCheck = [ "cloudscraper" ];
+
pythonImportsCheck = [
+
"cloudscraper"
+
];
meta = with lib; {
-
description = "A Python module to bypass Cloudflare's anti-bot page";
+
description = "Python module to bypass Cloudflare's anti-bot page";
homepage = "https://github.com/venomous/cloudscraper";
license = licenses.mit;
maintainers = with maintainers; [ kini ];
+26
pkgs/development/python-modules/docx2txt/default.nix
···
+
{ lib
+
, buildPythonPackage
+
, fetchPypi
+
}:
+
+
buildPythonPackage rec {
+
pname = "docx2txt";
+
version = "0.8";
+
format = "setuptools";
+
+
src = fetchPypi {
+
inherit pname version;
+
hash = "sha256-LAbZjXz+LTlH5XYKV9kk4/8HdFs3nIc3cjki5wCSNuU=";
+
};
+
+
pythonImportsCheck = [
+
"docx2txt"
+
];
+
+
meta = with lib; {
+
description = "A pure python-based utility to extract text and images from docx files";
+
homepage = "https://github.com/ankushshah89/python-docx2txt";
+
license = licenses.mit;
+
maintainers = with maintainers; [ ilkecan ];
+
};
+
}
+2 -2
pkgs/development/python-modules/env-canada/default.nix
···
buildPythonPackage rec {
pname = "env-canada";
-
version = "0.5.20";
+
version = "0.5.21";
format = "setuptools";
disabled = pythonOlder "3.8";
···
owner = "michaeldavie";
repo = "env_canada";
rev = "v${version}";
-
sha256 = "sha256-gYl5+rtOzci3nhgP74VM37tNk9pPWgcNBfcSSG1fSJs=";
+
sha256 = "sha256-jildWpYWll5j7siYhNECMBjz9bF41xFA6NyydWNdgQE=";
};
propagatedBuildInputs = [
+2 -2
pkgs/development/python-modules/google-cloud-redis/default.nix
···
buildPythonPackage rec {
pname = "google-cloud-redis";
-
version = "2.7.1";
+
version = "2.8.0";
format = "setuptools";
disabled = pythonOlder "3.6";
src = fetchPypi {
inherit pname version;
-
hash = "sha256-tz2upcRjgE6/4cB0riARwot3Vhw4QSKqqHTlJS3i7is=";
+
hash = "sha256-7L3SjViQmzTp//5LWWG9VG+TQuPay70KZdUuzhy7HS0=";
};
propagatedBuildInputs = [
+6 -2
pkgs/development/python-modules/google-cloud-storage/default.nix
···
, google-cloud-testutils
, google-resumable-media
, mock
+
, pythonOlder
}:
buildPythonPackage rec {
pname = "google-cloud-storage";
-
version = "2.2.0";
+
version = "2.2.1";
+
format = "setuptools";
+
+
disabled = pythonOlder "3.7";
src = fetchPypi {
inherit pname version;
-
sha256 = "sha256-01mWgBE11R20m7j3p+Kc7cwlqotDXu0MTA7y+e5W0dk=";
+
hash = "sha256-AkT0YScQy17ERfxndDh1ZOI/mCM2P7QIsock4hAkAbc=";
};
propagatedBuildInputs = [
+2 -2
pkgs/development/python-modules/intellifire4py/default.nix
···
buildPythonPackage rec {
pname = "intellifire4py";
-
version = "1.0.0";
+
version = "1.0.1";
format = "setuptools";
disabled = pythonOlder "3.7";
···
owner = "jeeftor";
repo = pname;
rev = version;
-
hash = "sha256-lQV5KpASbrz+wCi9x/0rNYrQE+dLCZzsNBFhYAQvPH4=";
+
hash = "sha256-hKe9sDn5t2qQ0THqFQypAGgr7cJXaZs8562NpPR/iJU=";
};
propagatedBuildInputs = [
+2 -2
pkgs/development/python-modules/mdformat/default.nix
···
buildPythonPackage rec {
pname = "mdformat";
-
version = "0.7.13";
+
version = "0.7.14";
format = "pyproject";
disabled = pythonOlder "3.7";
···
owner = "executablebooks";
repo = pname;
rev = version;
-
sha256 = "sha256-9ssDe7Wjuwuq2j7xwRyLqKouqeIt6NCUbEXjPdu2VZ8=";
+
sha256 = "sha256-bImBW6r8g/4MQ9yNrBBhk7AGqKRXFyAew6HHEmqelxw=";
};
nativeBuildInputs = [
+2 -2
pkgs/development/python-modules/meshtastic/default.nix
···
buildPythonPackage rec {
pname = "meshtastic";
-
version = "1.2.90";
+
version = "1.2.92";
format = "setuptools";
disabled = pythonOlder "3.6";
···
owner = "meshtastic";
repo = "Meshtastic-python";
rev = version;
-
sha256 = "sha256-n/M1Q6YS3EkUcn45ffiTy0wuj9yKf6qBLLfD2XJkhHU=";
+
sha256 = "sha256-tK711Lewr5Zc6dy/cDe9UEnq9zOEvuJg4mZyO3zBLR0=";
};
propagatedBuildInputs = [
+56 -35
pkgs/development/python-modules/nassl/default.nix
···
, tls-parser
, cacert
, pytestCheckHook
+
, pythonAtLeast
, pythonOlder
}:
···
"enable-tls1_3"
"no-async"
];
-
patches = builtins.filter (
-
p: (builtins.baseNameOf (toString p)) != "macos-yosemite-compat.patch"
-
) oldAttrs.patches;
+
patches = builtins.filter
+
(
+
p: (builtins.baseNameOf (toString p)) != "macos-yosemite-compat.patch"
+
)
+
oldAttrs.patches;
buildInputs = oldAttrs.buildInputs ++ [ zlibStatic cacert ];
meta = oldAttrs.meta // {
knownVulnerabilities = [
···
sha256 = "1zqb1rff1wikc62a7vj5qxd1k191m8qif5d05mwdxz2wnzywlg72";
};
configureFlags = oldAttrs.configureFlags ++ nasslOpensslFlagsCommon;
-
patches = builtins.filter (
-
p: (builtins.baseNameOf (toString p)) == "darwin64-arm64.patch"
-
) oldAttrs.patches;
+
patches = builtins.filter
+
(
+
p: (builtins.baseNameOf (toString p)) == "darwin64-arm64.patch"
+
)
+
oldAttrs.patches;
buildInputs = oldAttrs.buildInputs ++ [ zlibStatic ];
# openssl_1_0_2 needs `withDocs = false`
outputs = lib.remove "doc" oldAttrs.outputs;
···
in
buildPythonPackage rec {
pname = "nassl";
-
version = "4.0.1";
+
version = "4.0.2";
+
format = "setuptools";
+
disabled = pythonOlder "3.7";
src = fetchFromGitHub {
owner = "nabla-c0d3";
repo = pname;
rev = version;
-
hash = "sha256-QzO7ABh2weBO6NVFIj7kZpS8ashbDGompuvdKteJeUc=";
+
hash = "sha256-lLyHXLmBVvT+LgsKBU8DcUXd0qaLSrwvXxFnIB9CHcU=";
};
-
postPatch = let
-
legacyOpenSSLVersion = lib.replaceStrings ["."] ["_"] opensslLegacyStatic.version;
-
modernOpenSSLVersion = lib.replaceStrings ["."] ["_"] opensslStatic.version;
-
zlibVersion = zlibStatic.version;
-
in ''
-
mkdir -p deps/openssl-OpenSSL_${legacyOpenSSLVersion}/
-
cp ${opensslLegacyStatic.out}/lib/libssl.a \
-
${opensslLegacyStatic.out}/lib/libcrypto.a \
-
deps/openssl-OpenSSL_${legacyOpenSSLVersion}/
-
ln -s ${opensslLegacyStatic.out.dev}/include deps/openssl-OpenSSL_${legacyOpenSSLVersion}/include
-
ln -s ${opensslLegacyStatic.bin}/bin deps/openssl-OpenSSL_${legacyOpenSSLVersion}/apps
+
postPatch =
+
let
+
legacyOpenSSLVersion = lib.replaceStrings [ "." ] [ "_" ] opensslLegacyStatic.version;
+
modernOpenSSLVersion = lib.replaceStrings [ "." ] [ "_" ] opensslStatic.version;
+
zlibVersion = zlibStatic.version;
+
in
+
''
+
mkdir -p deps/openssl-OpenSSL_${legacyOpenSSLVersion}/
+
cp ${opensslLegacyStatic.out}/lib/libssl.a \
+
${opensslLegacyStatic.out}/lib/libcrypto.a \
+
deps/openssl-OpenSSL_${legacyOpenSSLVersion}/
+
ln -s ${opensslLegacyStatic.out.dev}/include deps/openssl-OpenSSL_${legacyOpenSSLVersion}/include
+
ln -s ${opensslLegacyStatic.bin}/bin deps/openssl-OpenSSL_${legacyOpenSSLVersion}/apps
+
+
mkdir -p deps/openssl-OpenSSL_${modernOpenSSLVersion}/
+
cp ${opensslStatic.out}/lib/libssl.a \
+
${opensslStatic.out}/lib/libcrypto.a \
+
deps/openssl-OpenSSL_${modernOpenSSLVersion}/
+
ln -s ${opensslStatic.out.dev}/include deps/openssl-OpenSSL_${modernOpenSSLVersion}/include
+
ln -s ${opensslStatic.bin}/bin deps/openssl-OpenSSL_${modernOpenSSLVersion}/apps
-
mkdir -p deps/openssl-OpenSSL_${modernOpenSSLVersion}/
-
cp ${opensslStatic.out}/lib/libssl.a \
-
${opensslStatic.out}/lib/libcrypto.a \
-
deps/openssl-OpenSSL_${modernOpenSSLVersion}/
-
ln -s ${opensslStatic.out.dev}/include deps/openssl-OpenSSL_${modernOpenSSLVersion}/include
-
ln -s ${opensslStatic.bin}/bin deps/openssl-OpenSSL_${modernOpenSSLVersion}/apps
+
mkdir -p deps/zlib-${zlibVersion}/
+
cp ${zlibStatic.out}/lib/libz.a deps/zlib-${zlibVersion}/
+
'';
-
mkdir -p deps/zlib-${zlibVersion}/
-
cp ${zlibStatic.out}/lib/libz.a deps/zlib-${zlibVersion}/
-
'';
+
nativeBuildInputs = [
+
invoke
+
];
-
propagatedBuildInputs = [ tls-parser ];
+
propagatedBuildInputs = [
+
tls-parser
+
];
-
nativeBuildInputs = [ invoke ];
+
checkInputs = [
+
pytestCheckHook
+
];
buildPhase = ''
invoke build.nassl
···
doCheck = true;
-
pythonImportsCheck = [ "nassl" ];
-
-
checkInputs = [ pytestCheckHook ];
+
pythonImportsCheck = [
+
"nassl"
+
];
disabledTests = [
"Online"
+
] ++ lib.optionals (pythonAtLeast "3.10") [
+
"test_write_bad"
+
"test_client_authentication_no_certificate_supplied"
+
"test_client_authentication_succeeds"
];
meta = with lib; {
+
description = "Low-level OpenSSL wrapper for Python";
homepage = "https://github.com/nabla-c0d3/nassl";
-
description = "Low-level OpenSSL wrapper for Python 3.7+";
-
platforms = with platforms; linux ++ darwin;
license = licenses.agpl3Only;
maintainers = with maintainers; [ veehaitch ];
+
platforms = with platforms; linux ++ darwin;
};
}
+2 -2
pkgs/development/python-modules/neo4j-driver/default.nix
···
buildPythonPackage rec {
pname = "neo4j-driver";
-
version = "4.4.1";
+
version = "4.4.2";
format = "setuptools";
disabled = pythonOlder "3.7";
···
owner = "neo4j";
repo = "neo4j-python-driver";
rev = version;
-
sha256 = "sha256-aGOqD6mmd3dulQ/SdaDPDZhkCwXdYCucHw+CrkJf1M0=";
+
sha256 = "sha256-rYedmxQvT+RjVdbDckLv00J4YuEQtMuIc8Q5FGWr3Rw=";
};
propagatedBuildInputs = [
+2 -2
pkgs/development/python-modules/proxmoxer/default.nix
···
buildPythonPackage rec {
pname = "proxmoxer";
-
version = "1.2.0";
+
version = "1.3.0";
disabled = pythonOlder "3.6";
src = fetchFromGitHub {
owner = pname;
repo = pname;
rev = version;
-
sha256 = "sha256-ElHocXrazwK+b5vdjYSJAYB4ajs2n+V8koj4QKkdDMQ=";
+
sha256 = "sha256-3EpId20WVVjXA/wxwy1peyHPcXdiT3fprABkcNBpZtE=";
};
propagatedBuildInputs = [
+4 -2
pkgs/development/python-modules/pyaussiebb/default.nix
···
, buildPythonPackage
, fetchFromGitHub
, loguru
+
, pydantic
, poetry-core
, pythonOlder
, requests
···
buildPythonPackage rec {
pname = "pyaussiebb";
-
version = "0.0.11";
+
version = "0.0.12";
format = "pyproject";
disabled = pythonOlder "3.9";
···
owner = "yaleman";
repo = "aussiebb";
rev = "v${version}";
-
hash = "sha256-aL+n2ut7n6UUyymMEHoFMhRvK9iFRRunYE9ZirKFXhc=";
+
hash = "sha256-4B+eq863G+iVl8UnxDumPVpkj9W8kX5LK0wo4QIYo4w=";
};
nativeBuildInputs = [
···
aiohttp
requests
loguru
+
pydantic
];
postPatch = ''
+3 -3
pkgs/development/python-modules/pycep-parser/default.nix
···
buildPythonPackage rec {
pname = "pycep-parser";
-
version = "0.3.1";
+
version = "0.3.2";
format = "pyproject";
disabled = pythonOlder "3.7";
···
owner = "gruebel";
repo = "pycep";
rev = version;
-
hash = "sha256-S4jBqMgyreWrEp1SuR8J5RVFc+i1O0xbfgux1UvFP5k=";
+
hash = "sha256-ud26xJQWdu7wtv75/K16HSSw0MvaSr3H1hDZBPjSzYE=";
};
nativeBuildInputs = [
···
postPatch = ''
substituteInPlace pyproject.toml \
-
--replace 'version = "0.3.1-alpha.1"' 'version = "${version}"' \
+
--replace 'version = "0.3.2-alpha.4"' 'version = "${version}"' \
--replace 'regex = "^2022.3.2"' 'regex = "*"'
'';
+2 -2
pkgs/development/python-modules/pykrakenapi/default.nix
···
buildPythonPackage rec {
pname = "pykrakenapi";
-
version = "0.2.4";
+
version = "0.3.0";
src = fetchFromGitHub {
owner = "dominiktraxl";
repo = "pykrakenapi";
rev = "v${version}";
-
hash = "sha256-i2r6t+JcL6INI8Y26gvVvNjv6XxMj4G+pF9Xf/hsx1A=";
+
hash = "sha256-ZhP4TEWFEGIqI/nk2It1IVFKrX4HKP+dWxu+gLJNIeg=";
};
propagatedBuildInputs = [
+2 -2
pkgs/development/python-modules/pysigma-backend-splunk/default.nix
···
buildPythonPackage rec {
pname = "pysigma-backend-splunk";
-
version = "0.1.1";
+
version = "0.1.2";
format = "pyproject";
disabled = pythonOlder "3.8";
···
owner = "SigmaHQ";
repo = "pySigma-backend-splunk";
rev = "v${version}";
-
hash = "sha256-AGT+7BKtINe2ukmomYyoUa5PHYAH1N0tUTtbyjMD+kw=";
+
hash = "sha256-jKvGBUO55DtF6bpgEL82XB5Ba+kmqJsCqUdzftcpSJ0=";
};
nativeBuildInputs = [
+2 -2
pkgs/development/python-modules/pysigma-pipeline-crowdstrike/default.nix
···
buildPythonPackage rec {
pname = "pysigma-pipeline-crowdstrike";
-
version = "0.1.3";
+
version = "0.1.4";
format = "pyproject";
disabled = pythonOlder "3.8";
···
owner = "SigmaHQ";
repo = "pySigma-pipeline-crowdstrike";
rev = "v${version}";
-
hash = "sha256-JNJHKydMzKreN+6liLlGMT1CFBUr/IX8Ah+exddKR3g=";
+
hash = "sha256-Riu2u1IouS1BMtXauXrNMIl06TU11pHdC0jjlOiR71s=";
};
nativeBuildInputs = [
+2 -2
pkgs/development/python-modules/pysigma-pipeline-sysmon/default.nix
···
buildPythonPackage rec {
pname = "pysigma-pipeline-sysmon";
-
version = "0.1.1";
+
version = "0.1.2";
format = "pyproject";
disabled = pythonOlder "3.8";
···
owner = "SigmaHQ";
repo = "pySigma-pipeline-sysmon";
rev = "v${version}";
-
hash = "sha256-BBJt2SAbnPEzIwJ+tXW4NmA4Nrb/glIaPlnmYHLoMD0=";
+
hash = "sha256-Y9X9/ynrfs4gVTLl7pOvK3TH2Eh2vNF1S6Cnt3tByJM=";
};
nativeBuildInputs = [
+3 -15
pkgs/development/python-modules/pysigma/default.nix
···
buildPythonPackage rec {
pname = "pysigma";
-
version = "0.3.2";
+
version = "0.4.1";
format = "pyproject";
disabled = pythonOlder "3.8";
···
owner = "SigmaHQ";
repo = "pySigma";
rev = "v${version}";
-
hash = "sha256-V/E2rZqVrk0kIvk+hPhNcAifhMM/rN3mk3pB+CGd43w=";
+
hash = "sha256-egyzeniid2PZZQ6hsd44W+YURI8uGaXvDMuhNIXUqO0=";
};
nativeBuildInputs = [
···
pytestCheckHook
];
-
patches = [
-
# Switch to poetry-core, https://github.com/SigmaHQ/pySigma/pull/31
-
(fetchpatch {
-
name = "switch-to-poetry-core.patch";
-
url = "https://github.com/SigmaHQ/pySigma/commit/b7a852d18852007da90c2ec35bff347c97b36f07.patch";
-
sha256 = "sha256-zgg8Bsc37W2uuQluFpIZT4jHCQaitY2ZgS93Wk6Hxt0=";
-
})
-
];
-
postPatch = ''
-
# https://github.com/SigmaHQ/pySigma/issues/32
-
# https://github.com/SigmaHQ/pySigma/issues/33
substituteInPlace pyproject.toml \
-
--replace 'pyparsing = "^2.4.7"' 'pyparsing = "*"' \
-
--replace 'pyyaml = "^5.3.1"' 'pyyaml = "*"'
+
--replace 'pyparsing = "^3.0.7"' 'pyparsing = "*"' \
'';
pythonImportsCheck = [
+6 -2
pkgs/development/python-modules/pytest-json-report/default.nix
···
, pytest-metadata
, pytest-xdist
, pytestCheckHook
+
, pythonOlder
}:
buildPythonPackage rec {
pname = "pytest-json-report";
-
version = "1.4.1";
+
version = "1.5.0";
+
format = "setuptools";
+
+
disabled = pythonOlder "3.7";
src = fetchFromGitHub {
owner = "numirias";
repo = pname;
rev = "v${version}";
-
sha256 = "sha256-OS9ASUp9iJ12Ovr931RQU/DHEAXqbgcRMCBP4h+GAhk=";
+
hash = "sha256-hMB/atDuo7CjwhHFUOxVfgJ7Qp4AA9J428iv7hyQFcs=";
};
buildInputs = [
+7 -3
pkgs/development/python-modules/python-box/default.nix
···
buildPythonPackage rec {
pname = "python-box";
-
version = "5.4.1";
+
version = "6.0.0";
+
format = "setuptools";
+
disabled = pythonOlder "3.6";
src = fetchFromGitHub {
owner = "cdgriffith";
repo = "Box";
rev = version;
-
sha256 = "sha256-SkteajcWG7rBFMm6Xp6QCfkZfwthRituGL/RtICbtYk=";
+
hash = "sha256-YOYcI+OAuTumNtTylUc6dSY9shOE6eTr8M3rVbcy5hs=";
};
propagatedBuildInputs = [
···
pytestCheckHook
];
-
pythonImportsCheck = [ "box" ];
+
pythonImportsCheck = [
+
"box"
+
];
meta = with lib; {
description = "Python dictionaries with advanced dot notation access";
+2 -2
pkgs/development/python-modules/python-http-client/default.nix
···
buildPythonPackage rec {
pname = "python_http_client";
-
version = "3.3.6";
+
version = "3.3.7";
format = "setuptools";
disabled = pythonOlder "3.8";
···
owner = "sendgrid";
repo = "python-http-client";
rev = version;
-
sha256 = "sha256-Xchf/jVkQ7SYOzI9f81iS/G72k//6wkl2bMvHprOP9Y=";
+
sha256 = "sha256-8Qs5Jw0LMV2UucLnlFKJQ2PUhYaQx6uJdIV/4gaPH3w=";
};
checkInputs = [
+2 -2
pkgs/development/python-modules/qcengine/default.nix
···
buildPythonPackage rec {
pname = "qcengine";
-
version = "0.22.0";
+
version = "0.23.0";
checkInputs = [ pytestCheckHook ];
···
src = fetchPypi {
inherit pname version;
-
sha256 = "685a08247b561ed1c7a7b42e68293f90b412e83556626304a3f826a15be51308";
+
sha256 = "sha256-gDn0Nu6ALTr3KyZnYDSA6RE3S5JQj562FP2RI9U3Gxs=";
};
doCheck = true;
+2 -2
pkgs/development/python-modules/readme_renderer/default.nix
···
buildPythonPackage rec {
pname = "readme-renderer";
-
version = "33.0";
+
version = "34.0";
format = "setuptools";
disabled = pythonOlder "3.6";
···
src = fetchPypi {
pname = "readme_renderer";
inherit version;
-
sha256 = "sha256-47U7yEvWrwVOTMH+NWfcGuGfVUE0IhBDo/jGdOIiCds=";
+
sha256 = "sha256-37TRfyFwbRRfdHPgthyiRbpY6BDPmyIJpII5Z3+C5bA=";
};
propagatedBuildInputs = [
+2 -2
pkgs/development/python-modules/samsungtvws/default.nix
···
buildPythonPackage rec {
pname = "samsungtvws";
-
version = "2.3.0";
+
version = "2.4.0";
disabled = isPy27;
src = fetchPypi {
inherit pname version;
-
sha256 = "sha256-2ly9lbnIHGHB55ml10jKE7dC5LdN1ToGW4GqfxTC5kI=";
+
sha256 = "sha256-LbNHaSbNCwoffox6B8kEUzxjkSJotB+P1bw3wbU7DZk=";
};
propagatedBuildInputs = [
+2 -2
pkgs/development/python-modules/soco/default.nix
···
buildPythonPackage rec {
pname = "soco";
-
version = "0.26.4";
+
version = "0.27.1";
format = "setuptools";
disabled = pythonOlder "3.6";
···
owner = "SoCo";
repo = "SoCo";
rev = "v${version}";
-
hash = "sha256-DoONq6Iqi8t47jtqggKYMHSNJAf/Kha3tszR6mYeB9Y=";
+
hash = "sha256-8U7wfxqen+hgK8j9ooPHCAKvd9kSZicToTyP7XzQFrg=";
};
propagatedBuildInputs = [
+2 -2
pkgs/development/python-modules/sslyze/default.nix
···
buildPythonPackage rec {
pname = "sslyze";
-
version = "5.0.2";
+
version = "5.0.3";
disabled = pythonOlder "3.7";
src = fetchFromGitHub {
owner = "nabla-c0d3";
repo = pname;
rev = version;
-
hash = "sha256-8xtnE5oFxH3wo2Smt65/xGDHxivexN6ggUpyUg42Cjk=";
+
hash = "sha256-d465WJIDsgNAPe8KW5v2KDSgzMH7OPLSiFfFH9n+jiA=";
};
patchPhase = ''
+11 -5
pkgs/development/python-modules/tls-parser/default.nix
···
buildPythonPackage rec {
pname = "tls-parser";
-
version = "1.2.2";
+
version = "2.0.0";
+
format = "setuptools";
+
disabled = pythonOlder "3.7";
src = fetchFromGitHub {
owner = "nabla-c0d3";
repo = "tls_parser";
rev = version;
-
sha256 = "12qj3vg02r5a51w6gbgb1gcxicqc10lbbsdi57jkkfvbqiindbd0";
+
hash = "sha256-A1lYRe1sHDoOFdF20DP+xRMcPBWzokIXFphIpaBmwBc=";
};
-
checkInputs = [ pytestCheckHook ];
+
checkInputs = [
+
pytestCheckHook
+
];
-
pythonImportsCheck = [ "tls_parser" ];
+
pythonImportsCheck = [
+
"tls_parser"
+
];
meta = with lib; {
-
homepage = "https://github.com/nabla-c0d3/tls_parser";
description = "Small library to parse TLS records";
+
homepage = "https://github.com/nabla-c0d3/tls_parser";
platforms = with platforms; linux ++ darwin;
license = licenses.mit;
maintainers = with maintainers; [ veehaitch ];
+2 -2
pkgs/development/python-modules/twentemilieu/default.nix
···
buildPythonPackage rec {
pname = "twentemilieu";
-
version = "0.5.0";
+
version = "0.6.0";
format = "pyproject";
disabled = pythonOlder "3.8";
···
owner = "frenck";
repo = "python-twentemilieu";
rev = "v${version}";
-
sha256 = "sha256-7HQ0+h8oiyY+TacQdX84K0r994rH0AMZAvZz8PUvQl0=";
+
sha256 = "sha256-UE7fhbSThXmMns1XfUUQqw0wn5/w/x+UncansIBiank=";
};
postPatch = ''
+2 -2
pkgs/development/python-modules/vt-py/default.nix
···
buildPythonPackage rec {
pname = "vt-py";
-
version = "0.13.2";
+
version = "0.14.0";
format = "setuptools";
disabled = pythonOlder "3.6";
···
owner = "VirusTotal";
repo = pname;
rev = version;
-
sha256 = "sha256-ULzMz81s/C5wjIUtZ+Rz5o1Uump1FV0rTcNW9keERDk=";
+
sha256 = "sha256-901VW56vr6ysMlzspgVbPMLnDIpJRgSEOEQ8ohHp+mc=";
};
propagatedBuildInputs = [
+3 -2
pkgs/development/tools/analysis/checkov/default.nix
···
buildPythonApplication rec {
pname = "checkov";
-
version = "2.0.954";
+
version = "2.0.962";
src = fetchFromGitHub {
owner = "bridgecrewio";
repo = pname;
rev = version;
-
hash = "sha256-gCUciYTEL+4Pt9vAGbun0WFQWneOhDDXh7Dn9+sZbWw=";
+
hash = "sha256-hpoOOU1Z8xVqoJJdGcSoWujm3amiPkZ1Qjiqh66J+ZM=";
};
nativeBuildInputs = with py.pkgs; [
···
bc-python-hcl2
boto3
cachetools
+
charset-normalizer
cloudsplaining
colorama
configargparse
+3 -3
pkgs/development/tools/build-managers/bazel/bazel-remote/default.nix
···
buildGoModule rec {
pname = "bazel-remote";
-
version = "2.3.4";
+
version = "2.3.5";
src = fetchFromGitHub {
owner = "buchgr";
repo = pname;
rev = "v${version}";
-
sha256 = "sha256-ijR3RjGzm0HtVp5lSKGJemCGkRzhgQqaDOgg+MjDB1c=";
+
sha256 = "sha256-8tT00ppqBGLw2h+RxaiD7r3XYzyvXOHj1U8V5H/ftyQ=";
};
-
vendorSha256 = "sha256-NmTdS5xgv0o7AT4lBJk472Lq1e73EcrcfnI8RIxKEoc=";
+
vendorSha256 = "sha256-wXgW7HigMIeUZAcZpm5TH9thfCHmpz+M42toWHgwIYo=";
doCheck = false;
+3 -3
pkgs/development/tools/database/prisma-engines/default.nix
···
rustPlatform.buildRustPackage rec {
pname = "prisma-engines";
-
version = "3.10.0";
+
version = "3.11.0";
src = fetchFromGitHub {
owner = "prisma";
repo = "prisma-engines";
rev = version;
-
sha256 = "sha256-0m0RjIasEGB9QxZc7wKCMLnxHXkSlvCDA2QWa87mRRs=";
+
sha256 = "sha256-z7ebwidY+p350XaGeyohoSHWc2DhfzpRxsRDLON1BuA=";
};
# Use system openssl.
OPENSSL_NO_VENDOR = 1;
-
cargoSha256 = "sha256-KNQa+wLLl4abz48QKYkWu7A+FTGIyB+1EWAnLuWpJwc=";
+
cargoSha256 = "sha256-PQdLoNJL9szPzPtFRznWS0lngTvtWK+Ko2rp4JWH9dQ=";
nativeBuildInputs = [ pkg-config ];
+2 -2
pkgs/development/tools/doctl/default.nix
···
buildGoModule rec {
pname = "doctl";
-
version = "1.70.0";
+
version = "1.71.0";
vendorSha256 = null;
···
owner = "digitalocean";
repo = "doctl";
rev = "v${version}";
-
sha256 = "sha256-oLcWVUP9A/tcJYKiaBrqAdyNKESaVFOaNiG/fAVQb2c=";
+
sha256 = "sha256-cj2+DmJyLa6kFkH9JflaR3yFFXBaVZHO6czJGLEH7L0=";
};
meta = with lib; {
+32 -32
pkgs/development/tools/electron/default.nix
···
headers = "0vvizddmhprprbdf6bklasz6amwc254bpc9j0zlx23d1pgyxpnhc";
};
-
electron_14 = mkElectron "14.2.6" {
-
armv7l-linux = "fd115652f491fff6a28bf39dc41e3c7f1b638e7dcc7856c33b6a97c7763ea9a3";
-
aarch64-linux = "530df3030aeb2c0f67ba4bc210c0f0fe77670001d2ba30ad6858f74952528df2";
-
x86_64-linux = "c3f91ced7e429079d43c182f47cea1eceef17ab65c390e15f9c6af56e58ed3d9";
-
i686-linux = "d66881d0747c99618c500b46e044eb4e97442400624fbcf9a6af114743e6e8db";
-
x86_64-darwin = "15db43c17a33bf9e31f66f5025e0810dfbd2b237f7645eda51409de7930cc9d1";
-
aarch64-darwin = "a5f7b8cc5f6dfc7561368d2f09745967bb553a29a22ef74af8f795225483338a";
-
headers = "0rxbij6qvi0xzcmbxf3fm1snvakaxp9c512z9ni36y98sgg4s3l8";
+
electron_14 = mkElectron "14.2.7" {
+
armv7l-linux = "bb0c25671daa0dc235e212831d62f18b9a7f2692279bcd8e4a15f2d84ee7124d";
+
aarch64-linux = "149c5df2cf98ee0a2ce5445b3fb00752f42c3f7ab9677b7a54ba01fba2e2f4ec";
+
x86_64-linux = "ad80f424e8d8d79f0be078d8a1ddef8fd659fa3dd8aaf6704ab97f2a13489558";
+
i686-linux = "82b29272cb52dbe969c0bd6cf9b69896c86abe1d9ef473a3844c0ab3dc92b353";
+
x86_64-darwin = "2a5d8336dcd140158964801d482344756377d924a06e6605959034a41f7e026b";
+
aarch64-darwin = "b45869ff61bdf392bca498529b6445d47a784079f6a33af6b19d517953f03fd8";
+
headers = "0339fs3iyp869xi1xmn9z2b1n32wf408cc0z9bz6shns44ymkyhd";
};
-
electron_15 = mkElectron "15.4.0" {
-
armv7l-linux = "40c073a3b416f83264327bdf5e33b334ffcd56a729ef237360d66f520f670d16";
-
aarch64-linux = "ef18ba74b4fa34a26f9ee819bb908c60d0dd9ec2048414629979760f262d72f8";
-
x86_64-linux = "5bdea4cbf5559491e9ad9f365fa6f7ec26603fd6f68bfa8848f2884ebd51662d";
-
i686-linux = "636d0e28bb20ca127c9b8722fe39e7e7d95fc63bd15b156b7af563296b3d9595";
-
x86_64-darwin = "8a132b2be0f27c7e8fa9a91a8b4b0fcdf3ec571c721cb5f5610dc8a6b3f0fd26";
-
aarch64-darwin = "82b29c37a427464a9278d617435ca19f472b00689c9e58163e99f30b90f33046";
-
headers = "0fc1sck7g160klpqzfcqv9zc45ia914mrncyma58zzcbzpk6k6yb";
+
electron_15 = mkElectron "15.4.1" {
+
armv7l-linux = "e0fe5daed46a5d718b3209fa301aea743df694daf6605f9313f4ca6c70fe5167";
+
aarch64-linux = "fa108edd4c146811bdee842fcd278b046ae0ff157de5e072c3ff3ac0bcb310c2";
+
x86_64-linux = "867095924d434b3918df8576e7af94fecea4d29461fcfb69c40161f02158ff15";
+
i686-linux = "8e79fa9f4125f254abb437445fed8f3f8ec10dd2462e1ced3e7df49c622e087d";
+
x86_64-darwin = "899d16a0e0157809c297ceb3710c53441ec4396333d9ad5b65297446874e14dc";
+
aarch64-darwin = "8295bf45dab1131dfdfd15654a0b1d85bfae221052ba64353368a2c0faaaa3ff";
+
headers = "073697wjq60cnz42xmnjsr0xqcmcsl4m48mmzrz1rxrc8mvi86gr";
};
-
electron_16 = mkElectron "16.0.10" {
-
armv7l-linux = "1a72fe59011cfcc1f376f2948dd5a70d2f75d6c12fb682a0246d2e596227b5e0";
-
aarch64-linux = "46cd1393816364a666ead410505bce4b51d68ce872446a71d16886b88c4b275a";
-
x86_64-linux = "3b4779e41e27200ce5fa94d20f9df05ff5c757be6805eb0e8952fe198d66f324";
-
i686-linux = "9e1426a8135d3fe195ba9fc1a5ea5ad4d5ce96bd513691897b39106698e3c3c8";
-
x86_64-darwin = "00b0222efa67fbb29f723fabebc4221646ebd6d5fdc09524df9a203f63ce660c";
-
aarch64-darwin = "1203f6ec4e8b97312254ceb122ca4399f39ae67bfe1636e426a798c89ec2a9ee";
-
headers = "10f6px88vg6napyhniczi6l660qs4l5mm0b9gdlds4i1y94s1zrl";
+
electron_16 = mkElectron "16.1.0" {
+
armv7l-linux = "f3ab34c73b4100ffc5041ed9aa0608d1dc6b98fe3c2caa14be3d5c3ffbebda76";
+
aarch64-linux = "e80a7e4a59b94c7cd02b16ca37a2b0f26ddb58ddac23135c6180b238589f1c62";
+
x86_64-linux = "36c79af4d05e89ef9c9616a156f63adc5b453ee6bee5d8f4331e75ee77198e85";
+
i686-linux = "7129a96fc33de70cfe5d6d0e17ecff1b4dcf52d825a6ad05b10ca67da7b43665";
+
x86_64-darwin = "723859249e959948cdd339acf708022fb0195b433809af25b4a9f4d69b9da52f";
+
aarch64-darwin = "e76558028979f70107e5b1897275a9789be20b13991bfbcebeab7fc220f15764";
+
headers = "0yv9rssrfi0hdzrjf1n735dsz9sgy78jzxdcf9is2387yrr1qiyz";
};
-
electron_17 = mkElectron "17.1.0" {
-
armv7l-linux = "09d92195821aad4ac03fbc858287a7372b6aa059081bb825d267853ee1b0425d";
-
aarch64-linux = "6a34d6802d44a391902f53baf0adc8b819d33e7c31b34614757b17b3223c9d1e";
-
x86_64-linux = "106ec28a5969366c1e1f642cc33ac41950c68bd182db23b04d7ac6886bfe28e8";
-
i686-linux = "19fe206be07a6df7f9f0ecdb411e3cafd3a53618edb19cc6adc77156f698444e";
-
x86_64-darwin = "bd80d6b0b62c9bec195d264aa21a255f4324a8b56849e04972c1fcc262757c31";
-
aarch64-darwin = "83fe2fc24976d09a0e0fcc3a60226f190cf9b67287fe4434d3d76d59fa45315c";
-
headers = "1zv1pigfbis1bsan28wx3bgkdwjc48pjq19wmxs73kd1khsy6w8r";
+
electron_17 = mkElectron "17.1.2" {
+
armv7l-linux = "b561c04c9fa8c512f418ea5c32f5526732e1ccd150ee4830a0091d0fa1b7e31c";
+
aarch64-linux = "cda7e66c6672def9edd881107c28e2eec09b7802a38227ac89bb233191ce4840";
+
x86_64-linux = "7e7c35e8c1a0fc451e7af19fa73264881ae2c4672c52a2ae1cdd61604650ca94";
+
i686-linux = "de87a7952c93c1d8e8c533a700bbfc76d3893e9ad438413507d11450b80a9c97";
+
x86_64-darwin = "d4382d3f01b750676a1f3c9e2273ad69cac16dc64a4145469c663bcda8d2471b";
+
aarch64-darwin = "135dec87211fcefdb53ab1fef13344c7b71a321f7c4f6846f260c1e0848e73bf";
+
headers = "15k234d044lgmc3psyxz9syy9wvzgn54znklak9sv6gcajjzll10";
};
}
+4 -6
pkgs/development/tools/gomodifytags/default.nix
···
buildGoModule rec {
pname = "gomodifytags";
-
version = "1.6.0";
-
-
vendorSha256 = null;
-
-
doCheck = false;
+
version = "1.16.0";
src = fetchFromGitHub {
owner = "fatih";
repo = "gomodifytags";
rev = "v${version}";
-
sha256 = "1wmzl5sk5mc46njzn86007sqyyv6han058ppiw536qyhk88rzazq";
+
sha256 = "1yhkn9mdvsn9i5v03c5smz32zlhkylnxhkcbjb7llafxzbhzgfm6";
};
+
+
vendorSha256 = "sha256-8efqJfu+gtoFbhdlDZfb8NsXV9hBDI2pvAQNH18VVhU=";
meta = {
description = "Go tool to modify struct field tags";
+8 -8
pkgs/development/tools/jp/default.nix
···
-
{ lib, buildGoPackage, fetchFromGitHub }:
+
{ lib, buildGoModule, fetchFromGitHub }:
-
buildGoPackage rec {
+
buildGoModule rec {
pname = "jp";
-
version = "0.1.3";
-
rev = version;
-
-
goPackagePath = "github.com/jmespath/jp";
+
version = "0.2.1";
src = fetchFromGitHub {
-
inherit rev;
+
rev = version;
owner = "jmespath";
repo = "jp";
-
sha256 = "0fdbnihbd0kq56am3bmh2zrfk4fqjslcbm48malbgmpqw3a5nvpi";
+
hash = "sha256-a3WvLAdUZk+Y+L+opPDMBvdN5x5B6nAi/lL8JHJG/gY=";
};
+
+
vendorSha256 = "sha256-K6ZNtART7tcVBH5myV6vKrKWfnwK8yTa6/KK4QLyr00=";
+
meta = with lib; {
description = "A command line interface to the JMESPath expression language for JSON";
homepage = "https://github.com/jmespath/jp";
+2 -2
pkgs/development/tools/k6/default.nix
···
buildGoModule rec {
pname = "k6";
-
version = "0.36.0";
+
version = "0.37.0";
src = fetchFromGitHub {
owner = "grafana";
repo = pname;
rev = "v${version}";
-
sha256 = "sha256-yWEh0sPMGe6mNcLKhbmJEUCHzZKFGMcTRNQrHgiQ+BQ=";
+
sha256 = "sha256-5pxOg+pwa2VrEWinDadx2ZFYXiQgochbU4bCkJEezQw=";
};
subPackages = [ "./" ];
-2
pkgs/development/tools/kcli/default.nix
···
vendorSha256 = "0zj2hls8m0l9xsfv680wiwq1g2qjdjslv2yx3yd4rzxdsv2wz09a";
-
doCheck = false;
-
subPackages = [ "." ];
meta = with lib; {
+3 -3
pkgs/development/tools/rust/cargo-crev/default.nix
···
rustPlatform.buildRustPackage rec {
pname = "cargo-crev";
-
version = "0.23.0";
+
version = "0.23.1";
src = fetchFromGitHub {
owner = "crev-dev";
repo = "cargo-crev";
rev = "v${version}";
-
sha256 = "sha256-iqFE3sz7cIFcd9zCFjU1PjMWVmxCRcdiiGAbirWUWMA=";
+
sha256 = "sha256-XzjZEVyPVn+7VrjG4QsqVBFmuGC1TWTWLEoqFcwQhaI=";
};
-
cargoSha256 = "sha256-qlqW5phw7QI5KT2uUamQFEYHZd4uzYaUuvZTG3KhrOU=";
+
cargoSha256 = "sha256-p87ZnOxaF9ytSUxp0P3QE3K1/jo7hz/N7BH1f2Lc0I0=";
preCheck = ''
export HOME=$(mktemp -d)
+3 -3
pkgs/development/tools/stylua/default.nix
···
rustPlatform.buildRustPackage rec {
pname = "stylua";
-
version = "0.12.4";
+
version = "0.12.5";
src = fetchFromGitHub {
owner = "johnnymorganz";
repo = pname;
rev = "v${version}";
-
sha256 = "sha256-BPLN7/LaVDtCOJBgIJVbnENUyFtacRsK3JxDupytzOA=";
+
sha256 = "sha256-4tQQTTAdIAhlkBJevwwwGXOKd6bJJOyG4nlbCv7909Y=";
};
-
cargoSha256 = "sha256-MZsFbFQp5Rw20pXzvTFNhMiVx/TJZ63/2rU7vj7IcqQ=";
+
cargoSha256 = "sha256-DGe2lB8xZgY9ikTsIHDOdHzTyHfDaSlmy8FU/S9FDCI=";
buildFeatures = lib.optional lua52Support "lua52"
++ lib.optional luauSupport "luau";
+1 -1
pkgs/development/web/nodejs/setup-hook.sh
···
addNodePath () {
-
addToSearchPath NODE_PATH $1/lib/node_modules
+
addToSearchPath NODE_PATH "$1/lib/node_modules"
}
addEnvHooks "$hostOffset" addNodePath
+2 -2
pkgs/os-specific/linux/kernel/linux-4.14.nix
···
with lib;
buildLinux (args // rec {
-
version = "4.14.271";
+
version = "4.14.272";
# modDirVersion needs to be x.y.z, will automatically add .0 if needed
modDirVersion = if (modDirVersionArg == null) then concatStringsSep "." (take 3 (splitVersion "${version}.0")) else modDirVersionArg;
···
src = fetchurl {
url = "mirror://kernel/linux/kernel/v4.x/linux-${version}.tar.xz";
-
sha256 = "1mzxcjzw6y7b3fffz0hbgsl6328w3m5yv5xb21z57kr9vm828y80";
+
sha256 = "0scx13pc5y5jmm5xa17my242gsgb1mf0cgqzjx656g7kkh4phqcv";
};
} // (args.argsOverride or {}))
+2 -2
pkgs/os-specific/linux/kernel/linux-4.19.nix
···
with lib;
buildLinux (args // rec {
-
version = "4.19.234";
+
version = "4.19.235";
# modDirVersion needs to be x.y.z, will automatically add .0 if needed
modDirVersion = if (modDirVersionArg == null) then concatStringsSep "." (take 3 (splitVersion "${version}.0")) else modDirVersionArg;
···
src = fetchurl {
url = "mirror://kernel/linux/kernel/v4.x/linux-${version}.tar.xz";
-
sha256 = "12rd468wvmmdmgzy9vs2ny155yp9wxrf15lrslpc8xm4wimrd0h0";
+
sha256 = "1615y3ma9icmqqr7lisl8nd8zvvkh77a81yl39yvy6qi9345l32k";
};
} // (args.argsOverride or {}))
+2 -2
pkgs/os-specific/linux/kernel/linux-4.9.nix
···
{ buildPackages, fetchurl, perl, buildLinux, nixosTests, stdenv, ... } @ args:
buildLinux (args // rec {
-
version = "4.9.306";
+
version = "4.9.307";
extraMeta.branch = "4.9";
extraMeta.broken = stdenv.isAarch64;
src = fetchurl {
url = "mirror://kernel/linux/kernel/v4.x/linux-${version}.tar.xz";
-
sha256 = "1cvsz3sf24g2623m7fxc6ilzsdmzi8s8lnks3sg68sax0qdx0ny7";
+
sha256 = "1xyhz7hq8yyclxyavzk36sbl41vlb74pccd56240kq34ma1hyis7";
};
} // (args.argsOverride or {}))
+2 -2
pkgs/os-specific/linux/kernel/linux-5.10.nix
···
with lib;
buildLinux (args // rec {
-
version = "5.10.105";
+
version = "5.10.106";
# modDirVersion needs to be x.y.z, will automatically add .0 if needed
modDirVersion = if (modDirVersionArg == null) then concatStringsSep "." (take 3 (splitVersion "${version}.0")) else modDirVersionArg;
···
src = fetchurl {
url = "mirror://kernel/linux/kernel/v5.x/linux-${version}.tar.xz";
-
sha256 = "11fb9y6sqrf0hvak83ym7sbbacjl3q51w523vxjdpjmrn850xp1x";
+
sha256 = "0yjrlghcxw3lhd6nc2m4zy4gk536w3a3w6mxdsml690fqz4531n6";
};
} // (args.argsOverride or {}))
+2 -2
pkgs/os-specific/linux/kernel/linux-5.15.nix
···
with lib;
buildLinux (args // rec {
-
version = "5.15.28";
+
version = "5.15.29";
# modDirVersion needs to be x.y.z, will automatically add .0 if needed
modDirVersion = if (modDirVersionArg == null) then concatStringsSep "." (take 3 (splitVersion "${version}.0")) else modDirVersionArg;
···
src = fetchurl {
url = "mirror://kernel/linux/kernel/v5.x/linux-${version}.tar.xz";
-
sha256 = "1rhhn2a7799nnvx8dj83glb0p0qakxanhxvvl7crznvip7rvp8nq";
+
sha256 = "0vl7xm4xs59z071wfjna392yada3hg5h6h3dfjaswircc22fc1ar";
};
} // (args.argsOverride or { }))
+2 -2
pkgs/os-specific/linux/kernel/linux-5.16.nix
···
with lib;
buildLinux (args // rec {
-
version = "5.16.14";
+
version = "5.16.15";
# modDirVersion needs to be x.y.z, will automatically add .0 if needed
modDirVersion = if (modDirVersionArg == null) then concatStringsSep "." (take 3 (splitVersion "${version}.0")) else modDirVersionArg;
···
src = fetchurl {
url = "mirror://kernel/linux/kernel/v5.x/linux-${version}.tar.xz";
-
sha256 = "1xkl0mfjby7w6r3fqyjds94h2lmc77nzp970w7wz1rfmb63ab2vs";
+
sha256 = "1mi41npkk1inqchm3yp14xmzc5lrp50d7vbpazwxwq5kw04c8c4g";
};
} // (args.argsOverride or { }))
+2 -2
pkgs/os-specific/linux/kernel/linux-5.4.nix
···
with lib;
buildLinux (args // rec {
-
version = "5.4.184";
+
version = "5.4.185";
# modDirVersion needs to be x.y.z, will automatically add .0 if needed
modDirVersion = if (modDirVersionArg == null) then concatStringsSep "." (take 3 (splitVersion "${version}.0")) else modDirVersionArg;
···
src = fetchurl {
url = "mirror://kernel/linux/kernel/v5.x/linux-${version}.tar.xz";
-
sha256 = "128laiqkr6z3pya8ws7r2ddrpbc3xyn80zwclz2wlrf6wqwwm546";
+
sha256 = "11rp3x05bq9cs9gwy4x36ynkgl7nb5ss29zi6m7n5ywvczdfjpyi";
};
} // (args.argsOverride or {}))
+2 -2
pkgs/os-specific/linux/kernel/linux-libre.nix
···
{ stdenv, lib, fetchsvn, linux
, scripts ? fetchsvn {
url = "https://www.fsfla.org/svn/fsfla/software/linux-libre/releases/branches/";
-
rev = "18627";
-
sha256 = "0qlalxpw2a24625ck5mxchpxl6i6cgmzkzfgyp9apmhdy8590fv5";
+
rev = "18635";
+
sha256 = "0d74hji2cms9z3h3s1j4i7qnw1350a95vafrqargf9s2zz0bkgfc";
}
, ...
}:
+2 -2
pkgs/os-specific/linux/oci-seccomp-bpf-hook/default.nix
···
buildGoModule rec {
pname = "oci-seccomp-bpf-hook";
-
version = "1.2.3";
+
version = "1.2.5";
src = fetchFromGitHub {
owner = "containers";
repo = "oci-seccomp-bpf-hook";
rev = "v${version}";
-
sha256 = "sha256-EKD6tkdQCPlVlb9ScvRwDxYAtbbv9PIqBHH6SvtPDsE=";
+
sha256 = "sha256-PU7WX5RAV6wWVRsqq6MdEjr00AtlTT4cSacRaxrEF2s=";
};
vendorSha256 = null;
+1 -1
pkgs/os-specific/linux/tuigreet/default.nix
···
cargoSha256 = "sha256-H5xqk7Yd3M8sFGHlmhAS0fhh3eM4dkvkNQGVxRSXUJs=";
meta = with lib; {
-
description = "Graphical console greter for greetd";
+
description = "Graphical console greeter for greetd";
homepage = "https://github.com/apognu/tuigreet";
license = licenses.gpl3Plus;
maintainers = with maintainers; [ luc65r ivar ];
+3 -1
pkgs/os-specific/linux/udisks/2-default.nix
···
blkid = "${util-linux}/bin/blkid";
false = "${coreutils}/bin/false";
mdadm = "${mdadm}/bin/mdadm";
+
mkswap = "${util-linux}/bin/mkswap";
sed = "${gnused}/bin/sed";
sh = "${bash}/bin/sh";
sleep = "${coreutils}/bin/sleep";
+
swapon = "${util-linux}/bin/swapon";
true = "${coreutils}/bin/true";
})
(substituteAll {
···
description = "A daemon, tools and libraries to access and manipulate disks, storage devices and technologies";
homepage = "https://www.freedesktop.org/wiki/Software/udisks/";
license = with licenses; [ lgpl2Plus gpl2Plus ]; # lgpl2Plus for the library, gpl2Plus for the tools & daemon
-
maintainers = with maintainers; [ johnazoidberg ];
+
maintainers = teams.freedesktop.members ++ (with maintainers; [ johnazoidberg ]);
platforms = platforms.linux;
};
}
+20 -17
pkgs/os-specific/linux/udisks/fix-paths.patch
···
-
diff --git a/Makefile.am b/Makefile.am
-
index 56922b79..697f8c6e 100644
-
--- a/Makefile.am
-
+++ b/Makefile.am
-
@@ -1,6 +1,6 @@
-
## Process this file with automake to produce Makefile.in
-
-
-SHELL = @BASH@
-
+SHELL = @bash@
-
.SHELLFLAGS = -o pipefail -c
-
-
PYTHON ?= python3
diff --git a/data/80-udisks2.rules b/data/80-udisks2.rules
-
index 39bfa28b..ee1ca90a 100644
+
index ca802cce..bfd1c29e 100644
--- a/data/80-udisks2.rules
+++ b/data/80-udisks2.rules
@@ -17,9 +17,9 @@ ENV{DM_UDEV_DISABLE_OTHER_RULES_FLAG}=="?*", GOTO="udisks_probe_end"
···
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#
diff --git a/src/tests/integration-test b/src/tests/integration-test
-
index 4499a6a9..8b711f95 100755
+
index 07e4e029..3bd8ec51 100755
--- a/src/tests/integration-test
+++ b/src/tests/integration-test
+
@@ -299,7 +299,7 @@ class UDisksTestCase(unittest.TestCase):
+
if not device:
+
device = cls.devname(partition)
+
result = {}
+
- cmd = subprocess.Popen(['blkid', '-p', '-o', 'udev', device], stdout=subprocess.PIPE)
+
+ cmd = subprocess.Popen(['@blkid@', '-p', '-o', 'udev', device], stdout=subprocess.PIPE)
+
for l in cmd.stdout:
+
(key, value) = l.decode('UTF-8').split('=', 1)
+
result[key] = value.strip()
+
@@ -437,7 +437,7 @@ class UDisksTestCase(unittest.TestCase):
+
f.write('KERNEL=="sr*", ENV{DISK_EJECT_REQUEST}!="?*", '
+
'ATTRS{model}=="scsi_debug*", '
+
'ENV{ID_CDROM_MEDIA}=="?*", '
+
- 'IMPORT{program}="/sbin/blkid -o udev -p -u noraid $tempnode"\n')
+
+ 'IMPORT{program}="@blkid@ -o udev -p -u noraid $tempnode"\n')
+
# reload udev
+
subprocess.call('sync; pkill --signal HUP udevd || '
+
'pkill --signal HUP systemd-udevd',
@@ -1142,7 +1142,7 @@ class FS(UDisksTestCase):
self.assertFalse(os.access(f, os.X_OK))
···
udisks_spawned_job_start (job);
g_object_unref (job);
}
-
--
-
2.33.1
-
+2 -2
pkgs/os-specific/linux/usbguard/default.nix
···
assert libgcrypt != null -> libsodium == null;
stdenv.mkDerivation rec {
-
version = "1.1.0";
+
version = "1.1.1";
pname = "usbguard";
src = fetchFromGitHub {
owner = "USBGuard";
repo = pname;
rev = "usbguard-${version}";
-
sha256 = "sha256-lnHeU/X/2N81WPLakRYLs8TjpBhxBPhiXDJ+wNW0sU0=";
+
sha256 = "sha256-lAh+l9GF+FHQqv2kEYU5JienZKGwR5e45BYAwjieYgw=";
fetchSubmodules = true;
};
+9 -2
pkgs/servers/dns/coredns/default.nix
···
vendorSha256 = "sha256-Vxs+k4WF55xwjgdlW/1NM4NWnYqj2EOLOONflj+BoY4=";
-
doCheck = false;
+
postPatch = ''
+
substituteInPlace test/file_cname_proxy_test.go \
+
--replace "TestZoneExternalCNAMELookupWithProxy" \
+
"SkipZoneExternalCNAMELookupWithProxy"
+
+
substituteInPlace test/readme_test.go \
+
--replace "TestReadme" "SkipReadme"
+
'';
meta = with lib; {
homepage = "https://coredns.io";
description = "A DNS server that runs middleware";
license = licenses.asl20;
-
maintainers = with maintainers; [ rushmorem rtreffer deltaevo ];
+
maintainers = with maintainers; [ rushmorem rtreffer deltaevo superherointj ];
};
}
+6 -4
pkgs/servers/http/apache-httpd/2.4.nix
···
-
{ lib, stdenv, fetchurl, perl, zlib, apr, aprutil, pcre, libiconv, lynx
+
{ lib, stdenv, fetchurl, perl, zlib, apr, aprutil, pcre2, libiconv, lynx, which
, nixosTests
, proxySupport ? true
, sslSupport ? true, openssl
···
stdenv.mkDerivation rec {
pname = "apache-httpd";
-
version = "2.4.52";
+
version = "2.4.53";
src = fetchurl {
url = "mirror://apache/httpd/httpd-${version}.tar.bz2";
-
sha256 = "sha256-ASf33El+mYPpxRR0vtdeRWB/L4cKdnWobckK9tVy9ck=";
+
sha256 = "sha256-0LvREhpXtfKm/5LXuW+AUMWkXT8U2xGPZJedUlhY22M=";
};
# FIXME: -dev depends on -doc
outputs = [ "out" "dev" "man" "doc" ];
setOutputFlags = false; # it would move $out/modules, etc.
+
+
nativeBuildInputs = [ which ];
buildInputs = [ perl ] ++
lib.optional brotliSupport brotli ++
···
"--with-apr=${apr.dev}"
"--with-apr-util=${aprutil.dev}"
"--with-z=${zlib.dev}"
-
"--with-pcre=${pcre.dev}"
+
"--with-pcre=${pcre2.dev}/bin/pcre2-config"
"--disable-maintainer-mode"
"--disable-debugger-mode"
"--enable-mods-shared=all"
+6 -11
pkgs/servers/nextcloud/default.nix
···
};
};
in {
-
nextcloud20 = throw ''
-
Nextcloud v20 has been removed from `nixpkgs` as the support for it was dropped
-
by upstream in 2021-10. Please upgrade to at least Nextcloud v21 by declaring
+
nextcloud21 = throw ''
+
Nextcloud v21 has been removed from `nixpkgs` as the support for it was dropped
+
by upstream in 2022-02. Please upgrade to at least Nextcloud v22 by declaring
-
services.nextcloud.package = pkgs.nextcloud21;
+
services.nextcloud.package = pkgs.nextcloud22;
in your NixOS config.
-
WARNING: if you were on Nextcloud 19 on NixOS 21.05 you have to upgrade to Nextcloud 20
-
first on 21.05 because Nextcloud doesn't support upgrades accross multiple major versions!
+
WARNING: if you were on Nextcloud 20 on NixOS 21.11 you have to upgrade to Nextcloud 21
+
first on 21.11 because Nextcloud doesn't support upgrades accross multiple major versions!
'';
-
-
nextcloud21 = generic {
-
version = "21.0.9";
-
sha256 = "sha256-p6bvgTXmmjGN3TRQpG88f3YPksh0QzWG9j9KnEjcrqE=";
-
};
nextcloud22 = generic {
version = "22.2.5";
+3 -3
pkgs/servers/piping-server-rust/default.nix
···
rustPlatform.buildRustPackage rec {
pname = "piping-server-rust";
-
version = "0.12.0";
+
version = "0.12.1";
src = fetchFromGitHub {
owner = "nwtgck";
repo = pname;
rev = "v${version}";
-
sha256 = "sha256-eDO2y/4660IAcD9vf1Vt6t3nv3Rc+zCRRFBbW/FeKIw=";
+
sha256 = "sha256-L15ofIM5a/qoJHGXmkuTsmQLLmERG/PxAJ4+z1nn7w4=";
};
-
cargoSha256 = "sha256-U68R543l28osPe0DjuERqB/G6ur/BZDpWMZIO9RObaM=";
+
cargoSha256 = "sha256-CcIM7T7P4LbPxPK1ZqoJRP0IsLMEwMZg9DcuRu0aJHM=";
buildInputs = lib.optionals stdenv.isDarwin [ CoreServices Security ];
+6 -4
pkgs/shells/fish/default.nix
···
fish = stdenv.mkDerivation rec {
pname = "fish";
-
version = "3.3.1";
+
version = "3.4.0";
src = fetchurl {
# There are differences between the release tarball and the tarball GitHub
···
# --version`), as well as the local documentation for all builtins (and
# maybe other things).
url = "https://github.com/fish-shell/fish-shell/releases/download/${version}/${pname}-${version}.tar.xz";
-
sha256 = "sha256-tbTuGlJpdiy76ZOkvWUH5nXkEAzpu+hCFKXusrGfrok=";
+
sha256 = "sha256-tbSKuEhrGe9xajL39GuIuepTVhVfDpZ+6Z9Ak2RUE8U=";
};
-
-
patches = [ ./tests-pcre2-update.patch ]; # should be included in >= 3.4
# Fix FHS paths in tests
postPatch = ''
···
rm tests/pexpects/exit.py
rm tests/pexpects/job_summary.py
rm tests/pexpects/signals.py
+
'' + lib.optionalString (stdenv.isLinux && stdenv.isAarch64) ''
+
# pexpect tests are flaky on aarch64-linux
+
# See https://github.com/fish-shell/fish-shell/issues/8789
+
rm tests/pexpects/exit_handlers.py
'';
nativeBuildInputs = [
-7
pkgs/shells/fish/tests-pcre2-update.patch
···
-
Adapted formating to 3.3.1 from
-
https://github.com/fish-shell/fish-shell/commit/ec8844d834cc9fe626e9fc326c6f5410341d532a
-
--- a/src/fish_tests.cpp
-
+++ b/src/fish_tests.cpp
-
@@ -5726,2 +5725,0 @@
-
- {{L"string", L"match", L"-r", L"(?=ab\\K)", L"ab", 0}, STATUS_CMD_OK, L"\n"},
-
- {{L"string", L"match", L"-r", L"(?=ab\\K)..(?=cd\\K)", L"abcd", 0}, STATUS_CMD_OK, L"\n"},
+29 -4
pkgs/tools/admin/berglas/default.nix
···
{ lib, buildGoModule, fetchFromGitHub }:
+
let
+
skipTests = {
+
access = "Access";
+
create = "Create";
+
delete = "Delete";
+
list = "List";
+
read = "Read";
+
replace = "Replace";
+
resolver = "Resolve";
+
revoke = "Revoke";
+
update = "Update";
+
};
+
+
skipTestsCommand =
+
builtins.foldl' (acc: goFileName:
+
let testName = builtins.getAttr goFileName skipTests; in
+
''
+
${acc}
+
substituteInPlace pkg/berglas/${goFileName}_test.go \
+
--replace "TestClient_${testName}_storage" "SkipClient_${testName}_storage" \
+
--replace "TestClient_${testName}_secretManager" "SkipClient_${testName}_secretManager"
+
''
+
) "" (builtins.attrNames skipTests);
+
in
+
buildGoModule rec {
pname = "berglas";
-
version = "0.5.1";
+
version = "0.6.2";
src = fetchFromGitHub {
owner = "GoogleCloudPlatform";
repo = pname;
rev = "v${version}";
-
sha256 = "0y393g36h35zzqyf5b10j6qq2jhvz83j17cmasnv6wbyrb3vnn0n";
+
sha256 = "sha256-aLsrrK+z080qn7L2zggA8yD+QqLaSRJLTjWQnFKFogQ=";
};
-
vendorSha256 = null;
+
vendorSha256 = "sha256-HjZT0jezJzoEvXuzrjoTv/zSex+xDuGoP1h82CIlX14=";
-
doCheck = false;
+
postPatch = skipTestsCommand;
meta = with lib; {
description = "A tool for managing secrets on Google Cloud";
+2 -2
pkgs/tools/admin/fits-cloudctl/default.nix
···
buildGoModule rec {
pname = "fits-cloudctl";
-
version = "0.10.11";
+
version = "0.10.12";
src = fetchFromGitHub {
owner = "fi-ts";
repo = "cloudctl";
rev = "v${version}";
-
sha256 = "sha256-hGKnQk2OPpHsjbRh/xx3MidbUMio6tYn+oJB0t1a/yM=";
+
sha256 = "sha256-nFxudeJJ5BkfZxSnRquyATHyHwI+7xwfQxiY8cedtis=";
};
vendorSha256 = "sha256-f35Asf9l6ZfixpjMGzesTsxmANreilMxH2CULMH3b2o=";
+3 -3
pkgs/tools/admin/syft/default.nix
···
buildGoModule rec {
pname = "syft";
-
version = "0.41.4";
+
version = "0.41.6";
src = fetchFromGitHub {
owner = "anchore";
repo = pname;
rev = "v${version}";
-
sha256 = "sha256-QRT5wvud9VMtXQ8QPC7joIMq7kYYlfVUSgzqMFW5LIE=";
+
sha256 = "sha256-Ebs0IVdcll7bTNjoZalD5Ye0GFXJeas1nPseYLzZxOk=";
# populate values that require us to use git. By doing this in postFetch we
# can delete .git afterwards and maintain better reproducibility of the src.
leaveDotGit = true;
···
find "$out" -name .git -print0 | xargs -0 rm -rf
'';
};
-
vendorSha256 = "sha256-9/Mtjqny68HN4FItT2+yoknzdHBAS1aQL0VkTdm6624=";
+
vendorSha256 = "sha256-/WGkQfCUDmolGdzNxIZKzZnXWnqO2vvizBLJgO+s4Ak=";
nativeBuildInputs = [ installShellFiles ];
+3 -3
pkgs/tools/admin/trivy/default.nix
···
buildGoModule rec {
pname = "trivy";
-
version = "0.24.2";
+
version = "0.24.3";
src = fetchFromGitHub {
owner = "aquasecurity";
repo = pname;
rev = "v${version}";
-
sha256 = "sha256-xMLSou+8kIQVEJlkA7ygv623hvETcEAdyFPR3HJr5ZQ=";
+
sha256 = "sha256-8ozoSorVoYt5C4F2FgEwGYQErBVnoTt2FhxiC3/SGsA=";
};
-
vendorSha256 = "sha256-qRkxDvrqMVOsz5r3m3I+E0HAVoUwFykkfGzTz9Qc/S4=";
+
vendorSha256 = "sha256-HM4SxCvvHz7MZsHa8+Orx1KKCRhyZH28JlN9wW+/xIw=";
excludedPackages = "misc";
+24 -11
pkgs/tools/archivers/p7zip/default.nix
···
owner = "jinfeihan57";
repo = pname;
rev = "v${version}";
-
sha256 = "sha256-19F4hPV0nKVuFZNbOcXrcA1uW6Y3HQolaHVIYXGmh18=";
+
sha256 = {
+
free = "sha256-DrBuf2VPdcprHI6pMSmL7psm2ofOrUf0Oj0qwMjXzkk=";
+
unfree = "sha256-19F4hPV0nKVuFZNbOcXrcA1uW6Y3HQolaHVIYXGmh18=";
+
}.${if enableUnfree then "unfree" else "free"};
+
# remove the unRAR related code from the src drv
+
# > the license requires that you agree to these use restrictions,
+
# > or you must remove the software (source and binary) from your hard disks
+
# https://fedoraproject.org/wiki/Licensing:Unrar
+
extraPostFetch = lib.optionalString (!enableUnfree) ''
+
rm -r $out/CPP/7zip/Compress/Rar*
+
find $out -name makefile'*' -exec sed -i '/Rar/d' {} +
+
'';
};
# Default makefile is full of impurities on Darwin. The patch doesn't hurt Linux so I'm leaving it unconditional
···
substituteInPlace makefile.machine \
--replace 'CC=gcc' 'CC=${stdenv.cc.targetPrefix}gcc' \
--replace 'CXX=g++' 'CXX=${stdenv.cc.targetPrefix}g++'
-
'' + lib.optionalString (!enableUnfree) ''
-
# Remove non-free RAR source code
-
# (see DOC/License.txt, https://fedoraproject.org/wiki/Licensing:Unrar)
-
rm -r CPP/7zip/Compress/Rar*
-
find . -name makefile'*' -exec sed -i '/Rar/d' {} +
'';
makeFlags = [ "DEST_HOME=${placeholder "out"}" ];
···
NIX_CFLAGS_COMPILE = lib.optionalString stdenv.cc.isClang "-Wno-error=c++11-narrowing";
-
meta = {
+
passthru.updateScript = ./update.sh;
+
+
meta = with lib; {
homepage = "https://github.com/jinfeihan57/p7zip";
description = "A new p7zip fork with additional codecs and improvements (forked from https://sourceforge.net/projects/p7zip/)";
-
platforms = lib.platforms.unix;
-
maintainers = [ lib.maintainers.raskin ];
+
license = with licenses;
+
# p7zip code is largely lgpl2Plus
+
# CPP/7zip/Compress/LzfseDecoder.cpp is bsd3
+
[ lgpl2Plus /* and */ bsd3 ] ++
+
# and CPP/7zip/Compress/Rar* are unfree with the unRAR license restriction
+
# the unRAR compression code is disabled by default
+
lib.optionals enableUnfree [ unfree ];
+
maintainers = with maintainers; [ raskin jk ];
+
platforms = platforms.unix;
mainProgram = "7z";
-
# RAR code is under non-free UnRAR license, but we remove it
-
license = if enableUnfree then lib.licenses.unfree else lib.licenses.lgpl2Plus;
};
}
+47
pkgs/tools/archivers/p7zip/update.sh
···
+
#! /usr/bin/env nix-shell
+
#! nix-shell -i bash -p coreutils gnused curl jq
+
set -euo pipefail
+
cd "$(dirname "${BASH_SOURCE[0]}")"
+
+
DRV_DIR="$PWD"
+
+
OLD_VERSION="$(sed -nE 's/\s*version = "(.*)".*/\1/p' ./default.nix)"
+
+
NEW_VERSION="$(curl https://api.github.com/repos/jinfeihan57/p7zip/releases/latest | jq .tag_name -r | tr -d 'v')"
+
+
echo "comparing versions $OLD_VERSION => $NEW_VERSION"
+
if [[ "$OLD_VERSION" == "$NEW_VERSION" ]]; then
+
echo "Already up to date! Doing nothing"
+
exit 0
+
fi
+
+
NIXPKGS_ROOT="$(realpath "$DRV_DIR/../../../..")"
+
+
echo "getting free source hash"
+
OLD_FREE_HASH="$(nix-instantiate --eval --strict -E "with import $NIXPKGS_ROOT {}; p7zip.src.drvAttrs.outputHash" | tr -d '"')"
+
echo "getting unfree source hash"
+
OLD_UNFREE_HASH="$(nix-instantiate --eval --strict -E "with import $NIXPKGS_ROOT {}; (p7zip.override { enableUnfree = true; }).src.drvAttrs.outputHash" | tr -d '"')"
+
+
+
NEW_FREE_HASH=$(nix-prefetch -f "$NIXPKGS_ROOT" -E "p7zip.src" --rev "v$NEW_VERSION")
+
+
NEW_UNFREE_OUT=$(nix-prefetch -f "$NIXPKGS_ROOT" -E "(p7zip.override { enableUnfree = true; }).src" --rev "v$NEW_VERSION" --output raw --print-path)
+
# first line of raw output is the hash
+
NEW_UNFREE_HASH="$(echo "$NEW_UNFREE_OUT" | sed -n 1p)"
+
# second line of raw output is the src path
+
NEW_UNFREE_SRC="$(echo "$NEW_UNFREE_OUT" | sed -n 2p)"
+
# make sure to nuke the unfree src from the updater's machine
+
# > the license requires that you agree to these use restrictions, or you must remove the software (source and binary) from your hard disks
+
# https://fedoraproject.org/wiki/Licensing:Unrar
+
nix-store --delete "$NEW_UNFREE_SRC"
+
+
+
echo "updating version"
+
sed -i "s/version = \"$OLD_VERSION\";/version = \"$NEW_VERSION\";/" "$DRV_DIR/default.nix"
+
+
echo "updating free hash"
+
sed -i "s@free = \"$OLD_FREE_HASH\";@free = \"$NEW_FREE_HASH\";@" "$DRV_DIR/default.nix"
+
echo "updating unfree hash"
+
sed -i "s@unfree = \"$OLD_UNFREE_HASH\";@unfree = \"$NEW_UNFREE_HASH\";@" "$DRV_DIR/default.nix"
+
+
echo "done"
+2 -2
pkgs/tools/backup/btrbk/default.nix
···
stdenv.mkDerivation rec {
pname = "btrbk";
-
version = "0.32.0";
+
version = "0.32.1";
src = fetchurl {
url = "https://digint.ch/download/btrbk/releases/${pname}-${version}.tar.xz";
-
sha256 = "HmvNtIgFfeaiFuSRobWlcJqusPSYtqAqx+79+CeNVDQ=";
+
sha256 = "flQf1KTybPImDoD+iNe+P+u1rOiYxXjQoltuGPWuX3g=";
};
nativeBuildInputs = [ asciidoctor makeWrapper ];
+7 -56
pkgs/tools/misc/plantuml-server/default.nix
···
-
{ lib, stdenv, fetchFromGitHub, maven, jdk17_headless }:
+
{ lib, stdenv, fetchurl }:
let
version = "1.2022.2";
-
-
src = fetchFromGitHub {
-
owner = "plantuml";
-
repo = "plantuml-server";
-
rev = "v${version}";
-
sha256 = "sha256-55IBhulFo42jscBFrHM39qA0GRgKBoYNye4q9QkmjsM=";
-
};
-
-
# perform fake build to make a fixed-output derivation out of the files downloaded from maven central
-
deps = stdenv.mkDerivation {
-
name = "plantuml-server-${version}-deps";
-
inherit src;
-
nativeBuildInputs = [ jdk17_headless maven ];
-
buildPhase = ''
-
runHook preBuild
-
-
while mvn package -Dmaven.repo.local=$out/.m2; [ $? = 1 ]; do
-
echo "timeout, restart maven to continue downloading"
-
done
-
-
runHook postBuild
-
'';
-
# keep only *.{pom,jar,sha1,nbm} and delete all ephemeral files with lastModified timestamps inside
-
installPhase = ''
-
find $out/.m2 -type f -regex '.+\(\.lastUpdated\|resolver-status\.properties\|_remote\.repositories\)' -delete
-
'';
-
outputHashAlgo = "sha256";
-
outputHashMode = "recursive";
-
outputHash = "sha256-AheCBX5jFzDHqTI2pCWBIiDESEKMClXlvWIcFvu0goA=";
-
};
in
-
stdenv.mkDerivation rec {
pname = "plantuml-server";
inherit version;
-
inherit src;
-
-
nativeBuildInputs = [ jdk17_headless maven ];
-
-
buildPhase = ''
-
runHook preBuild
+
src = fetchurl {
+
url = "https://github.com/plantuml/plantuml-server/releases/download/v${version}/plantuml-v${version}.war";
+
sha256 = "sha256-h4ulXzZ5L+VPhk2CnZQNxfnEJzWT3B9TNvDEWt4o9Hk=";
+
};
-
# maven can output reproducible files after setting project.build.outputTimestamp property
-
# see https://maven.apache.org/guides/mini/guide-reproducible-builds.html#how-do-i-configure-my-maven-build
-
# 'maven.repo.local' must be writable so copy it out of nix store
-
cp -R $src repo
-
chmod +w -R repo
-
cd repo
-
mvn package --offline \
-
-Dproject.build.outputTimestamp=0 \
-
-Dmaven.repo.local=$(cp -dpR ${deps}/.m2 ./ && chmod +w -R .m2 && pwd)/.m2
-
-
runHook postBuild
-
'';
-
+
dontUnpack = true;
installPhase = ''
-
runHook preInstall
-
mkdir -p "$out/webapps"
-
cp "target/plantuml.war" "$out/webapps/plantuml.war"
-
-
runHook postInstall
+
cp "$src" "$out/webapps/plantuml.war"
'';
meta = with lib; {
-2
pkgs/tools/misc/up/default.nix
···
vendorSha256 = "1q8wfsfl3rz698ck5q5s5z6iw9k134fxxvwipcp2b052n998rcrx";
-
doCheck = false;
-
meta = with lib; {
description = "Ultimate Plumber is a tool for writing Linux pipes with instant live preview";
homepage = "https://github.com/akavel/up";
+3 -3
pkgs/tools/networking/frp/default.nix
···
buildGoModule rec {
pname = "frp";
-
version = "0.39.1";
+
version = "0.40.0";
src = fetchFromGitHub {
owner = "fatedier";
repo = pname;
rev = "v${version}";
-
sha256 = "sha256-tqdrYrIWbmRn+5iAZKd9GlcmFNQnh3yNxZ95As7+v5Q=";
+
sha256 = "sha256-W+88Fq9oYDBLCNp+6rc9jACJzky7FCZg/xLDowGGdm0=";
};
-
vendorSha256 = "sha256-NPnchl+N6DeqMhsOIw2MYD/i2IZzHS9ZqbUOeulgb90=";
+
vendorSha256 = "sha256-iBjMFOERWQ1aPn+2gEoI9og2ov2LlBVV1sLAZlvqZPM=";
doCheck = false;
-2
pkgs/tools/networking/hey/default.nix
···
vendorSha256 = null;
-
doCheck = false;
-
meta = with lib; {
description = "HTTP load generator, ApacheBench (ab) replacement";
homepage = "https://github.com/rakyll/hey";
+11 -7
pkgs/tools/networking/httpie/default.nix
···
{ lib
, fetchFromGitHub
, installShellFiles
-
, python3Packages
+
, python3
, pandoc
}:
-
python3Packages.buildPythonApplication rec {
+
python3.pkgs.buildPythonApplication rec {
pname = "httpie";
-
version = "3.0.2";
+
version = "3.1.0";
+
format = "setuptools";
src = fetchFromGitHub {
owner = "httpie";
repo = "httpie";
rev = version;
-
sha256 = "sha256-s3IFzEUQmPBocgspVGx1nINkUamsi7tzwW37IqdBMxo=";
+
hash = "sha256-x7Zucb2i8D4Xbn77eBzSxOAcc2fGg5MFKFiyJhytQ0s=";
};
nativeBuildInputs = [
···
pandoc
];
-
propagatedBuildInputs = with python3Packages; [
+
propagatedBuildInputs = with python3.pkgs; [
charset-normalizer
defusedxml
multidict
···
setuptools
];
-
checkInputs = with python3Packages; [
+
checkInputs = with python3.pkgs; [
mock
pytest
pytest-httpbin
+
pytest-lazy-fixture
pytestCheckHook
responses
];
···
"httpie.encoding.detect_encoding"
];
-
pythonImportsCheck = [ "httpie" ];
+
pythonImportsCheck = [
+
"httpie"
+
];
meta = with lib; {
description = "A command line HTTP client whose goal is to make CLI human-friendly";
+2 -2
pkgs/tools/networking/pirate-get/default.nix
···
buildPythonApplication rec {
pname = "pirate-get";
-
version = "0.4.1";
+
version = "0.4.2";
src = fetchPypi {
inherit pname version;
-
sha256 = "0pr703fwinr2f4rba86zp57mpf5j2jgvp5n50rc5vy5g7yfwsddm";
+
sha256 = "sha256-VtnVyJqrdGXTqcyzpHCOMUI9G7/BkXzihDrBrsxl7Eg=";
};
propagatedBuildInputs = [ colorama veryprettytable pyperclip ];
+19 -160
pkgs/tools/nix/nix-output-monitor/default.nix
···
-
# This file has been autogenerate with cabal2nix.
-
# Update via ./update.sh"
{
-
mkDerivation,
-
ansi-terminal,
-
async,
-
attoparsec,
-
base,
-
bytestring,
-
cassava,
-
containers,
-
data-default,
-
directory,
+
haskell,
expect,
-
extra,
-
fetchzip,
-
filepath,
-
generic-optics,
-
HUnit,
+
haskellPackages,
installShellFiles,
-
lib,
-
lock-file,
-
MemoTrie,
-
mtl,
-
nix-derivation,
-
optics,
-
process,
-
random,
-
relude,
-
runtimeShell,
-
safe,
-
stm,
-
streamly,
-
terminal-size,
-
text,
-
time,
-
unix,
-
vector,
-
wcwidth,
-
word8,
-
}:
-
mkDerivation {
-
pname = "nix-output-monitor";
-
version = "1.1.2.0";
-
src = fetchzip {
-
url = "https://github.com/maralorn/nix-output-monitor/archive/refs/tags/v1.1.2.0.tar.gz";
-
sha256 = "03qhy4xzika41pxlmvpz3psgy54va72ipn9v1lv33l6369ikrhl1";
+
}: let
+
inherit (haskell.lib.compose) justStaticExecutables overrideCabal;
+
overrides = {
+
passthru.updateScript = ./update.sh;
+
testTarget = "unit-tests";
+
buildTools = [installShellFiles];
+
postInstall = ''
+
substitute "exe-sh/nom-build" "$out/bin/nom-build" \
+
--replace 'unbuffer' '${expect}/bin/unbuffer' \
+
--replace 'nom' "$out/bin/nom"
+
chmod a+x $out/bin/nom-build
+
installShellCompletion --zsh --name _nom-build completions/completion.zsh
+
'';
};
-
isLibrary = true;
-
isExecutable = true;
-
libraryHaskellDepends = [
-
ansi-terminal
-
async
-
attoparsec
-
base
-
bytestring
-
cassava
-
containers
-
data-default
-
directory
-
extra
-
filepath
-
generic-optics
-
lock-file
-
MemoTrie
-
mtl
-
nix-derivation
-
optics
-
random
-
relude
-
safe
-
stm
-
streamly
-
terminal-size
-
text
-
time
-
unix
-
vector
-
wcwidth
-
word8
-
];
-
executableHaskellDepends = [
-
ansi-terminal
-
async
-
attoparsec
-
base
-
bytestring
-
cassava
-
containers
-
data-default
-
directory
-
extra
-
filepath
-
generic-optics
-
lock-file
-
MemoTrie
-
mtl
-
nix-derivation
-
optics
-
random
-
relude
-
safe
-
stm
-
streamly
-
terminal-size
-
text
-
time
-
unix
-
vector
-
wcwidth
-
word8
-
];
-
testHaskellDepends = [
-
ansi-terminal
-
async
-
attoparsec
-
base
-
bytestring
-
cassava
-
containers
-
data-default
-
directory
-
extra
-
filepath
-
generic-optics
-
HUnit
-
lock-file
-
MemoTrie
-
mtl
-
nix-derivation
-
optics
-
process
-
random
-
relude
-
safe
-
stm
-
streamly
-
terminal-size
-
text
-
time
-
unix
-
vector
-
wcwidth
-
word8
-
];
-
homepage = "https://github.com/maralorn/nix-output-monitor";
-
description = "Parses output of nix-build to show additional information";
-
license = lib.licenses.agpl3Plus;
-
maintainers = with lib.maintainers; [maralorn];
-
passthru.updateScript = ./update.sh;
-
testTarget = "unit-tests";
-
buildTools = [installShellFiles];
-
postInstall = ''
-
cat > $out/bin/nom-build << EOF
-
#!${runtimeShell}
-
${expect}/bin/unbuffer nix-build "\$@" 2>&1 | exec $out/bin/nom
-
EOF
-
chmod a+x $out/bin/nom-build
-
installShellCompletion --zsh --name _nom-build ${builtins.toFile "completion.zsh" ''
-
#compdef nom-build
-
compdef nom-build=nix-build
-
''}
-
'';
-
}
+
in
+
justStaticExecutables
+
(overrideCabal overrides
+
(haskellPackages.callPackage ./generated-package.nix {}))
+147
pkgs/tools/nix/nix-output-monitor/generated-package.nix
···
+
# This file has been autogenerate with cabal2nix.
+
# Update via ./update.sh"
+
{
+
mkDerivation,
+
ansi-terminal,
+
async,
+
attoparsec,
+
base,
+
bytestring,
+
cassava,
+
containers,
+
data-default,
+
directory,
+
extra,
+
fetchzip,
+
filepath,
+
generic-optics,
+
HUnit,
+
lib,
+
lock-file,
+
MemoTrie,
+
mtl,
+
nix-derivation,
+
optics,
+
process,
+
random,
+
relude,
+
safe,
+
stm,
+
streamly,
+
terminal-size,
+
text,
+
time,
+
unix,
+
vector,
+
wcwidth,
+
word8,
+
}:
+
mkDerivation {
+
pname = "nix-output-monitor";
+
version = "1.1.2.1";
+
src = fetchzip {
+
url = "https://github.com/maralorn/nix-output-monitor/archive/refs/tags/v1.1.2.1.tar.gz";
+
sha256 = "00jn963jskyqnwvbvn5x0z92x2gv105p5h8m13nlmr90lj4axynx";
+
};
+
isLibrary = true;
+
isExecutable = true;
+
libraryHaskellDepends = [
+
ansi-terminal
+
async
+
attoparsec
+
base
+
bytestring
+
cassava
+
containers
+
data-default
+
directory
+
extra
+
filepath
+
generic-optics
+
lock-file
+
MemoTrie
+
mtl
+
nix-derivation
+
optics
+
random
+
relude
+
safe
+
stm
+
streamly
+
terminal-size
+
text
+
time
+
unix
+
vector
+
wcwidth
+
word8
+
];
+
executableHaskellDepends = [
+
ansi-terminal
+
async
+
attoparsec
+
base
+
bytestring
+
cassava
+
containers
+
data-default
+
directory
+
extra
+
filepath
+
generic-optics
+
lock-file
+
MemoTrie
+
mtl
+
nix-derivation
+
optics
+
random
+
relude
+
safe
+
stm
+
streamly
+
terminal-size
+
text
+
time
+
unix
+
vector
+
wcwidth
+
word8
+
];
+
testHaskellDepends = [
+
ansi-terminal
+
async
+
attoparsec
+
base
+
bytestring
+
cassava
+
containers
+
data-default
+
directory
+
extra
+
filepath
+
generic-optics
+
HUnit
+
lock-file
+
MemoTrie
+
mtl
+
nix-derivation
+
optics
+
process
+
random
+
relude
+
safe
+
stm
+
streamly
+
terminal-size
+
text
+
time
+
unix
+
vector
+
wcwidth
+
word8
+
];
+
homepage = "https://github.com/maralorn/nix-output-monitor";
+
description = "Parses output of nix-build to show additional information";
+
license = lib.licenses.agpl3Plus;
+
maintainers = with lib.maintainers; [maralorn];
+
}
+2 -23
pkgs/tools/nix/nix-output-monitor/update.sh
···
# This is the directory of this update.sh script.
script_dir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
-
derivation_file="${script_dir}/default.nix"
+
derivation_file="${script_dir}/generated-package.nix"
# This is the latest released version of nix-output-monitor on GitHub.
new_version=$(curl --silent "https://api.github.com/repos/maralorn/nix-output-monitor/releases" | jq '.[0].tag_name' --raw-output)
···
EOF
cabal2nix \
-
--extra-arguments expect \
-
--extra-arguments runtimeShell\
-
--extra-arguments installShellFiles\
--maintainer maralorn \
"https://github.com/maralorn/nix-output-monitor/archive/refs/tags/${new_version}.tar.gz" \
-
| head -n-1 >> "$derivation_file"
-
-
cat >> "$derivation_file" << EOF
-
passthru.updateScript = ./update.sh;
-
testTarget = "unit-tests";
-
buildTools = [ installShellFiles ];
-
postInstall = ''
-
cat > \$out/bin/nom-build << EOF
-
#!\${runtimeShell}
-
\${expect}/bin/unbuffer nix-build "\\\$@" 2>&1 | exec \$out/bin/nom
-
EOF
-
chmod a+x \$out/bin/nom-build
-
installShellCompletion --zsh --name _nom-build \${builtins.toFile "completion.zsh" ''
-
#compdef nom-build
-
compdef nom-build=nix-build
-
''}
-
'';
-
}
-
EOF
+
>> "$derivation_file"
alejandra "${derivation_file}" | cat
+3 -3
pkgs/tools/security/cariddi/default.nix
···
buildGoModule rec {
pname = "cariddi";
-
version = "1.1.5";
+
version = "1.1.6";
src = fetchFromGitHub {
owner = "edoardottt";
repo = pname;
rev = "v${version}";
-
sha256 = "sha256-PXQljC9rwlxXQ96fII3EjD4NXu61EMkYvMWqkcJZ4vU=";
+
sha256 = "sha256-/ez2biYU8NnVny8v5Mu9pLq0oqzcIenpyEb3qkPd9v8=";
};
-
vendorSha256 = "sha256-zNUdglsfy6lEV54afCAoigxa3rR0qf/e3+B4PvVRIa4=";
+
vendorSha256 = "sha256-zJ39tAq+ooROMHG1vC2m2rbq+wttxqYxAd2hLg5GtJM=";
meta = with lib; {
description = "Crawler for URLs and endpoints";
+2 -2
pkgs/tools/security/cfripper/default.nix
···
python3.pkgs.buildPythonApplication rec {
pname = "cfripper";
-
version = "1.5.1";
+
version = "1.5.2";
src = fetchFromGitHub {
owner = "Skyscanner";
repo = pname;
rev = version;
-
hash = "sha256-/qcpLCk1ZZMKxhqK6q6sSbRDjiF5GQmDJzvCaV2kAqQ=";
+
hash = "sha256-tl0g08nnY1CZ4SNcMFPARIRquiO9SCen9VWeNalLHds=";
};
propagatedBuildInputs = with python3.pkgs; [
+2 -2
pkgs/tools/security/exploitdb/default.nix
···
stdenv.mkDerivation rec {
pname = "exploitdb";
-
version = "2022-03-11";
+
version = "2022-03-15";
src = fetchFromGitHub {
owner = "offensive-security";
repo = pname;
rev = version;
-
sha256 = "sha256-dW4cLm//4wROsizRQ59sqEGPRZ26yIU5I7mdPEYC3YU=";
+
sha256 = "sha256-whV7zg7njGGjUpxsXZiNwVfHgrlop2RLZnCsBWQ+HkY=";
};
+45
pkgs/tools/security/feroxbuster/default.nix
···
+
{ lib
+
, stdenv
+
, fetchFromGitHub
+
, openssl
+
, pkg-config
+
, rustPlatform
+
, Security
+
}:
+
+
rustPlatform.buildRustPackage rec {
+
pname = "feroxbuster";
+
version = "2.6.1";
+
+
src = fetchFromGitHub {
+
owner = "epi052";
+
repo = pname;
+
rev = "v${version}";
+
hash = "sha256-RY9bFuALRaVXDrC0eIx0inPjRqNpRKNZf3mCrKIdGL8=";
+
};
+
+
cargoSha256 = "sha256-0Zawlx/lhF7K8nOsHYKO84pnctVMpm3RfnAFCOltOqE=";
+
+
OPENSSL_NO_VENDOR = true;
+
+
nativeBuildInputs = [
+
pkg-config
+
];
+
+
buildInputs = [
+
openssl
+
] ++ lib.optionals stdenv.isDarwin [
+
Security
+
];
+
+
# Tests require network access
+
doCheck = false;
+
+
meta = with lib; {
+
description = "Fast, simple, recursive content discovery tool";
+
homepage = "https://github.com/epi052/feroxbuster";
+
license = with licenses; [ mit ];
+
maintainers = with maintainers; [ fab ];
+
};
+
}
+
+3 -3
pkgs/tools/security/grype/default.nix
···
buildGoModule rec {
pname = "grype";
-
version = "0.33.1";
+
version = "0.34.2";
src = fetchFromGitHub {
owner = "anchore";
repo = pname;
rev = "v${version}";
-
sha256 = "sha256-5QjyGIpxnrwTnEmi0D16vPKodg3+SKiINFONwU2OzC0=";
+
sha256 = "sha256-tMkMGM45/LcFllEgQ3UTl6FsLJmdsU8SLcLH/8+zMA4=";
# populate values that require us to use git. By doing this in postFetch we
# can delete .git afterwards and maintain better reproducibility of the src.
leaveDotGit = true;
···
'';
};
-
vendorSha256 = "sha256-CPMfQv9oiLbIMkZe/t482LzssoNTcNVJdr2o2wJecSA=";
+
vendorSha256 = "sha256-WrUZFlN7dPbyN9InjX/Y9J+iYKu5v2/SHmRgDP5BJi8=";
nativeBuildInputs = [
installShellFiles
+2 -2
pkgs/tools/security/sigma-cli/default.nix
···
python3.pkgs.buildPythonApplication rec {
pname = "sigma-cli";
-
version = "0.3.0";
+
version = "0.3.2";
format = "pyproject";
src = fetchFromGitHub {
owner = "SigmaHQ";
repo = pname;
rev = "v${version}";
-
hash = "sha256-Nfd78Y35naDTzwodcdvJr/02CptcHxS717VGsR/QOuI=";
+
hash = "sha256-We6vJXLIxGe//78pgJFrihFJHl0gRd02I53hoYWcao0=";
};
nativeBuildInputs = with python3.pkgs; [
+3 -3
pkgs/tools/security/step-ca/default.nix
···
buildGoModule rec {
pname = "step-ca";
-
version = "0.18.1";
+
version = "0.18.2";
src = fetchFromGitHub {
owner = "smallstep";
repo = "certificates";
rev = "v${version}";
-
sha256 = "sha256-oebmJ+xrJTV5gXH3U1lWCSQMHiVnUTa0ZTp39sVB7KM=";
+
sha256 = "sha256-BhPup3q2muYGWzAa/9b4vnIjBces4GhUHZ/mg4CWMRc=";
};
-
vendorSha256 = "sha256-IJXJS+Z93Hw1I1CAeRv4mq8as9DKebqNFa0IMgZ+Kic=";
+
vendorSha256 = "sha256-oVaziWZGslZCVqkEXL32XvOVU54VOf41Qg+VoVWo7x0=";
ldflags = [ "-buildid=" ];
+2 -2
pkgs/tools/system/syslog-ng/default.nix
···
stdenv.mkDerivation rec {
pname = "syslog-ng";
-
version = "3.35.1";
+
version = "3.36.1";
src = fetchurl {
url = "https://github.com/${pname}/${pname}/releases/download/${pname}-${version}/${pname}-${version}.tar.gz";
-
sha256 = "sha256-HQI4sGs+WYfIWeW1Kezuc491us/wSxSTmLH+jLsSHlM=";
+
sha256 = "sha256-kKJcl2f+dJ21DxGN38kuxxOZdj0uzVrU8R/17qBJ5gs=";
};
nativeBuildInputs = [ pkg-config which ];
+1
pkgs/top-level/aliases.nix
···
claws-mail-gtk2 = throw "claws-mail-gtk2 was removed to get rid of Python 2, please use claws-mail"; # Added 2021-12-05
claws-mail-gtk3 = claws-mail; # Added 2021-07-10
clawsMail = throw "'clawsMail' has been renamed to/replaced by 'claws-mail'"; # Converted to throw 2022-02-22
+
clearsilver = throw "clearsilver has been removed: abandoned by upstream"; # Added 2022-03-15
clutter_gtk = throw "'clutter_gtk' has been renamed to/replaced by 'clutter-gtk'"; # Converted to throw 2022-02-22
cmakeWithQt4Gui = throw "cmakeWithQt4Gui has been removed in favor of cmakeWithGui (Qt 5)"; # Added 2021-05
codimd = hedgedoc; # Added 2020-11-29
+17 -6
pkgs/top-level/all-packages.nix
···
borgmatic = callPackage ../tools/backup/borgmatic { };
+
borg-sans-mono = callPackage ../data/fonts/borg-sans-mono { };
+
boringtun = callPackage ../tools/networking/boringtun { };
bookstack = callPackage ../servers/web-apps/bookstack { };
···
inherit (darwin.apple_sdk.frameworks) Security;
};
+
lapce = callPackage ../applications/editors/lapce { };
+
lcdproc = callPackage ../servers/monitoring/lcdproc { };
languagetool = callPackage ../tools/text/languagetool { };
···
nix-direnv = callPackage ../tools/misc/nix-direnv { };
-
nix-output-monitor = haskell.lib.compose.justStaticExecutables (haskellPackages.nix-output-monitor);
+
nix-output-monitor = callPackage ../tools/nix/nix-output-monitor { };
nix-template = callPackage ../tools/package-management/nix-template {
inherit (darwin.apple_sdk.frameworks) Security;
···
ferm = callPackage ../tools/networking/ferm { };
+
feroxbuster = callPackage ../tools/security/feroxbuster {
+
inherit (darwin.apple_sdk.frameworks) Security;
+
};
+
ffsend = callPackage ../tools/misc/ffsend {
inherit (darwin.apple_sdk.frameworks) CoreFoundation CoreServices Security AppKit;
};
···
grocy = callPackage ../servers/grocy { };
inherit (callPackage ../servers/nextcloud {})
-
nextcloud20 nextcloud21 nextcloud22 nextcloud23;
+
nextcloud21 nextcloud22 nextcloud23;
nextcloud-client = libsForQt5.callPackage ../applications/networking/nextcloud-client { };
···
self = pkgsi686Linux.callPackage ../development/interpreters/self { };
inherit (callPackages ../applications/networking/cluster/spark { })
-
spark3
-
spark2;
+
spark_3_2
+
spark_3_1
+
spark_2_4;
+
spark3 = spark_3_2;
+
spark2 = spark_2_4;
spark = spark3;
sparkleshare = callPackage ../applications/version-management/sparkleshare { };
···
classads = callPackage ../development/libraries/classads { };
-
clearsilver = callPackage ../development/libraries/clearsilver { };
-
clfft = callPackage ../development/libraries/clfft { };
clipp = callPackage ../development/libraries/clipp { };
···
libctb = callPackage ../development/libraries/libctb { };
libctemplate = callPackage ../development/libraries/libctemplate { };
+
+
libctl = callPackage ../development/libraries/libctl { };
libcotp = callPackage ../development/libraries/libcotp { };
+2
pkgs/top-level/python-packages.nix
···
docx2python = callPackage ../development/python-modules/docx2python { };
+
docx2txt = callPackage ../development/python-modules/docx2txt { };
+
dodgy = callPackage ../development/python-modules/dodgy { };
dogpile-cache = callPackage ../development/python-modules/dogpile-cache { };