nixpkgs-fmt update

Charlotte Van Petegem 2020-04-27 14:46:52 +02:00
parent 5843e44ed6
commit d151c29b80
22 changed files with 277 additions and 282 deletions
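Most of the hunks below are mechanical restyling by nixpkgs-fmt rather than behavioural changes: empty collections gain an inner space ([] becomes [ ], {} becomes { }), spacing inside inline list and attrset literals is normalised, and bodies after `in` and multi-line function applications are re-indented. A few hunks also carry unrelated configuration tweaks (an i3status-rust option, a PASSWORD_STORE_DIR variable, a firefox override). The following is a minimal illustrative sketch of the style change only, assembled from patterns visible in this commit and not an actual file in the repository:

# Before nixpkgs-fmt:
#   { pkgs ? import <nixpkgs> {} }:
#   {
#     boot.extraModulePackages = [];
#     floating.criteria = [ { class = "launcher"; } ];
#   }
# After nixpkgs-fmt (empty collections spaced, inline literals tightened):
{ pkgs ? import <nixpkgs> { } }:
{
  boot.extraModulePackages = [ ];
  floating.criteria = [{ class = "launcher"; }];
}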

View file

@@ -6,7 +6,7 @@
boot.initrd.availableKernelModules = [ "xhci_pci" "ahci" "usbhid" "usb_storage" "sd_mod" "rtsx_pci_sdmmc" ];
boot.initrd.kernelModules = [ "i915" ];
boot.kernelModules = [ "kvm-intel" ];
boot.extraModulePackages = [];
boot.extraModulePackages = [ ];
fileSystems."/" = {
device = "/dev/disk/by-uuid/e4b7eae2-546d-412b-9258-389315f4b835";

View file

@@ -7,7 +7,7 @@
# Early KMS start
boot.initrd.kernelModules = [ "i915" ];
boot.kernelModules = [ "kvm-intel" ];
boot.extraModulePackages = [];
boot.extraModulePackages = [ ];
fileSystems."/" = {
device = "/dev/disk/by-uuid/0eb8b94a-5fc2-4b24-962a-94b3675b6f5b";

View file

@@ -1,8 +1,8 @@
{ pkgs ? import <nixpkgs> {} }:
{ pkgs ? import <nixpkgs> { } }:
with pkgs;
let
buildGradle = callPackage ./gradle-env.nix {};
buildGradle = callPackage ./gradle-env.nix { };
in
buildGradle {
envSpec = ./gradle-env.json;

View file

@@ -40,7 +40,6 @@ let
versionOlder unique mapAttrs last concatMapStringsSep removeSuffix
optionalString groupBy' readFile hasSuffix
;
mkDep = depSpec: stdenv.mkDerivation {
inherit (depSpec) name;
@@ -55,13 +54,11 @@ let
ln -s $src $out/${depSpec.path}/${depSpec.name}
'';
};
mkModuleMetadata = deps:
let
ids = filter
(id: id.type == "pom")
(map (dep: dep.id) deps);
modules = groupBy'
(
meta: id:
@@ -70,7 +67,8 @@ let
isNewerRelease =
!(hasSuffix "-SNAPSHOT" id.version)
&& versionOlder meta.release id.version;
in {
in
{
groupId = id.group;
artifactId = id.name;
latest = if isNewer then id.version else meta.latest;
@@ -81,17 +79,19 @@ let
{
latest = "";
release = "";
versions = [];
versions = [ ];
}
(id: "${replaceStrings [ "." ] [ "/" ] id.group}/${id.name}/maven-metadata.xml")
ids;
in
attrValues (
mapAttrs (
path: meta:
let
versions' = sort versionOlder (unique meta.versions);
in
attrValues
(
mapAttrs
(
path: meta:
let
versions' = sort versionOlder (unique meta.versions);
in
with meta; writeTextDir path ''
<?xml version="1.0" encoding="UTF-8"?>
<metadata modelVersion="1.1">
@@ -106,13 +106,11 @@ let
</versioning>
</metadata>
''
) modules
) modules
);
mkSnapshotMetadata = deps:
let
snapshotDeps = filter (dep: dep ? build && dep ? timestamp) deps;
modules = groupBy'
(
meta: dep:
@@ -120,17 +118,19 @@ let
id = dep.id;
isNewer = dep.build > meta.buildNumber;
# Timestamp values can be bogus, e.g. jitpack.io
updated = if (match "[0-9]{8}\.[0-9]{6}" dep.timestamp) != null
then replaceStrings [ "." ] [ "" ] dep.timestamp
else "";
in {
updated =
if (match "[0-9]{8}\.[0-9]{6}" dep.timestamp) != null
then replaceStrings [ "." ] [ "" ] dep.timestamp
else "";
in
{
groupId = id.group;
artifactId = id.name;
version = id.version;
timestamp = if isNewer then dep.timestamp else meta.timestamp;
buildNumber = if isNewer then dep.build else meta.buildNumber;
lastUpdated = if isNewer then updated else meta.lastUpdated;
versions = meta.versions or [] ++ [
versions = meta.versions or [ ] ++ [
{
classifier = id.classifier or "";
extension = id.extension;
@@ -147,7 +147,6 @@ let
}
(dep: "${replaceStrings [ "." ] [ "/" ] dep.id.group}/${dep.id.name}/${dep.id.version}/maven-metadata.xml")
snapshotDeps;
mkSnapshotVersion = version: ''
<snapshotVersion>
${optionalString (version.classifier != "") "<classifier>${version.classifier}</classifier>"}
@@ -157,77 +156,76 @@ let
</snapshotVersion>
'';
in
attrValues (
mapAttrs (
path: meta:
with meta; writeTextDir path ''
<?xml version="1.0" encoding="UTF-8"?>
<metadata modelVersion="1.1">
<groupId>${groupId}</groupId>
<artifactId>${artifactId}</artifactId>
<version>${version}</version>
<versioning>
<snapshot>
${optionalString (timestamp != "") "<timestamp>${timestamp}</timestamp>"}
${optionalString (buildNumber != -1) "<buildNumber>${toString buildNumber}</buildNumber>"}
</snapshot>
${optionalString (lastUpdated != "") "<lastUpdated>${lastUpdated}</lastUpdated>"}
<snapshotVersions>
${concatMapStringsSep "\n " mkSnapshotVersion versions}
</snapshotVersions>
</versioning>
</metadata>
''
) modules
attrValues
(
mapAttrs
(
path: meta:
with meta; writeTextDir path ''
<?xml version="1.0" encoding="UTF-8"?>
<metadata modelVersion="1.1">
<groupId>${groupId}</groupId>
<artifactId>${artifactId}</artifactId>
<version>${version}</version>
<versioning>
<snapshot>
${optionalString (timestamp != "") "<timestamp>${timestamp}</timestamp>"}
${optionalString (buildNumber != -1) "<buildNumber>${toString buildNumber}</buildNumber>"}
</snapshot>
${optionalString (lastUpdated != "") "<lastUpdated>${lastUpdated}</lastUpdated>"}
<snapshotVersions>
${concatMapStringsSep "\n " mkSnapshotVersion versions}
</snapshotVersions>
</versioning>
</metadata>
''
) modules
);
mkRepo = project: type: deps: buildEnv {
name = "${project}-gradle-${type}-env";
paths = map mkDep deps ++ mkModuleMetadata deps ++ mkSnapshotMetadata deps;
};
mkInitScript = projectSpec:
let
repos = mapAttrs (mkRepo projectSpec.name) projectSpec.dependencies;
in
writeText "init.gradle" ''
static def offlineRepo(RepositoryHandler repositories, String env, String path) {
repositories.clear()
repositories.maven {
name "Nix''${env.capitalize()}MavenOffline"
url path
metadataSources {
it.gradleMetadata()
it.mavenPom()
it.artifact()
}
}
repositories.ivy {
name "Nix''${env.capitalize()}IvyOffline"
url path
layout "maven"
metadataSources {
it.gradleMetadata()
it.ivyDescriptor()
it.artifact()
}
}
}
writeText "init.gradle" ''
static def offlineRepo(RepositoryHandler repositories, String env, String path) {
repositories.clear()
repositories.maven {
name "Nix''${env.capitalize()}MavenOffline"
url path
metadataSources {
it.gradleMetadata()
it.mavenPom()
it.artifact()
}
}
repositories.ivy {
name "Nix''${env.capitalize()}IvyOffline"
url path
layout "maven"
metadataSources {
it.gradleMetadata()
it.ivyDescriptor()
it.artifact()
}
}
}
gradle.settingsEvaluated {
offlineRepo(it.pluginManagement.repositories, "plugin", "${repos.plugin}")
}
gradle.projectsLoaded {
allprojects {
buildscript {
offlineRepo(repositories, "buildscript", "${repos.buildscript}")
}
offlineRepo(repositories, "project", "${repos.project}")
}
}
'';
gradle.settingsEvaluated {
offlineRepo(it.pluginManagement.repositories, "plugin", "${repos.plugin}")
}
gradle.projectsLoaded {
allprojects {
buildscript {
offlineRepo(repositories, "buildscript", "${repos.buildscript}")
}
offlineRepo(repositories, "project", "${repos.project}")
}
}
'';
mkGradle = gradleSpec:
gradleGen.gradleGen {
inherit (gradleSpec) nativeVersion;
@@ -238,46 +236,44 @@ let
inherit (gradleSpec) url sha256;
};
};
mkProjectEnv = projectSpec: {
inherit (projectSpec) name version;
initScript = mkInitScript projectSpec;
gradle = args.gradlePackage or mkGradle projectSpec.gradle;
};
gradleEnv = mapAttrs
(_: p: mkProjectEnv p)
(fromJSON (readFile envSpec));
projectEnv = gradleEnv."";
pname = args.pname or projectEnv.name;
version = args.version or projectEnv.version;
in
stdenv.mkDerivation (
args // {
stdenv.mkDerivation
(
args // {
inherit pname version;
inherit pname version;
nativeBuildInputs = (args.nativeBuildInputs or []) ++ [ projectEnv.gradle ];
nativeBuildInputs = (args.nativeBuildInputs or [ ]) ++ [ projectEnv.gradle ];
buildPhase = args.buildPhase or ''
runHook preBuild
buildPhase = args.buildPhase or ''
runHook preBuild
(
set -x
env \
"GRADLE_USER_HOME=$(mktemp -d)" \
gradle --offline --no-daemon --no-build-cache \
--info --full-stacktrace --warning-mode=all \
${optionalString enableParallelBuilding "--parallel"} \
${optionalString enableDebug "-Dorg.gradle.debug=true"} \
--init-script ${projectEnv.initScript} \
${concatStringsSep " " gradleFlags}
)
(
set -x
env \
"GRADLE_USER_HOME=$(mktemp -d)" \
gradle --offline --no-daemon --no-build-cache \
--info --full-stacktrace --warning-mode=all \
${optionalString enableParallelBuilding "--parallel"} \
${optionalString enableDebug "-Dorg.gradle.debug=true"} \
--init-script ${projectEnv.initScript} \
${concatStringsSep " " gradleFlags}
)
runHook postBuild
'';
runHook postBuild
'';
dontStrip = true;
}
)
dontStrip = true;
}
)

View file

@@ -5,7 +5,7 @@ let
python = if nodejs ? python then nodejs.python else python2;
# Create a tar wrapper that filters all the 'Ignoring unknown extended header keyword' noise
tarWrapper = runCommand "tarWrapper" {} ''
tarWrapper = runCommand "tarWrapper" { } ''
mkdir -p $out/bin
cat > $out/bin/tar <<EOF
@@ -20,27 +20,27 @@ let
buildNodeSourceDist =
{ name, version, src, ... }:
stdenv.mkDerivation {
name = "node-tarball-${name}-${version}";
inherit src;
buildInputs = [ nodejs ];
buildPhase = ''
export HOME=$TMPDIR
tgzFile=$(npm pack | tail -n 1) # Hooks to the pack command will add output (https://docs.npmjs.com/misc/scripts)
'';
installPhase = ''
mkdir -p $out/tarballs
mv $tgzFile $out/tarballs
mkdir -p $out/nix-support
echo "file source-dist $out/tarballs/$tgzFile" >> $out/nix-support/hydra-build-products
'';
};
stdenv.mkDerivation {
name = "node-tarball-${name}-${version}";
inherit src;
buildInputs = [ nodejs ];
buildPhase = ''
export HOME=$TMPDIR
tgzFile=$(npm pack | tail -n 1) # Hooks to the pack command will add output (https://docs.npmjs.com/misc/scripts)
'';
installPhase = ''
mkdir -p $out/tarballs
mv $tgzFile $out/tarballs
mkdir -p $out/nix-support
echo "file source-dist $out/tarballs/$tgzFile" >> $out/nix-support/hydra-build-products
'';
};
includeDependencies = { dependencies }:
stdenv.lib.optionalString (dependencies != [])
stdenv.lib.optionalString (dependencies != [ ])
(
stdenv.lib.concatMapStrings (
dependency:
stdenv.lib.concatMapStrings
(
dependency:
''
# Bundle the dependencies of the package
mkdir -p node_modules
@@ -54,11 +54,11 @@ let
cd ..
''
) dependencies
) dependencies
);
# Recursively composes the dependencies of a package
composePackage = { name, packageName, src, dependencies ? [], ... }@args:
composePackage = { name, packageName, src, dependencies ? [ ], ... }@args:
''
DIR=$(pwd)
cd $TMPDIR
@@ -104,7 +104,6 @@ let
cd ..
${stdenv.lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
'';
pinpointDependencies = { dependencies, production }:
let
pinpointDependenciesFromPackageJSON = writeTextFile {
@@ -160,10 +159,10 @@ let
'';
};
in
''
node ${pinpointDependenciesFromPackageJSON} ${if production then "production" else "development"}
''
node ${pinpointDependenciesFromPackageJSON} ${ if production then "production" else "development"}
${stdenv.lib.optionalString (dependencies != [])
${stdenv.lib.optionalString (dependencies != [ ])
''
if [ -d node_modules ]
then
@@ -172,13 +171,12 @@ let
cd ..
fi
''}
'';
'';
# Recursively traverses all dependencies of a package and pinpoints all
# dependencies in the package.json file to the versions that are actually
# being used.
pinpointDependenciesOfPackage = { packageName, dependencies ? [], production ? true, ... }@args:
pinpointDependenciesOfPackage = { packageName, dependencies ? [ ], production ? true, ... }@args:
''
if [ -d "${packageName}" ]
then
@@ -191,7 +189,7 @@ let
# Extract the Node.js source code which is used to compile packages with
# native bindings
nodeSources = runCommand "node-sources" {} ''
nodeSources = runCommand "node-sources" { } ''
tar --no-same-owner --no-same-permissions -xf ${nodejs.src}
mv node-* $out
'';
@@ -318,69 +316,68 @@ let
fs.writeFileSync("package-lock.json", JSON.stringify(lockObj, null, 2));
'';
};
prepareAndInvokeNPM = { packageName, bypassCache, reconstructLock, npmFlags, production }:
let
forceOfflineFlag = if bypassCache then "--offline" else "--registry http://www.example.com";
in
''
# Pinpoint the versions of all dependencies to the ones that are actually being used
echo "pinpointing versions of dependencies..."
source $pinpointDependenciesScriptPath
''
# Pinpoint the versions of all dependencies to the ones that are actually being used
echo "pinpointing versions of dependencies..."
source $pinpointDependenciesScriptPath
# Patch the shebangs of the bundled modules to prevent them from
# calling executables outside the Nix store as much as possible
patchShebangs .
# Patch the shebangs of the bundled modules to prevent them from
# calling executables outside the Nix store as much as possible
patchShebangs .
# Deploy the Node.js package by running npm install. Since the
# dependencies have been provided already by ourselves, it should not
# attempt to install them again, which is good, because we want to make
# it Nix's responsibility. If it needs to install any dependencies
# anyway (e.g. because the dependency parameters are
# incomplete/incorrect), it fails.
#
# The other responsibilities of NPM are kept -- version checks, build
# steps, postprocessing etc.
# Deploy the Node.js package by running npm install. Since the
# dependencies have been provided already by ourselves, it should not
# attempt to install them again, which is good, because we want to make
# it Nix's responsibility. If it needs to install any dependencies
# anyway (e.g. because the dependency parameters are
# incomplete/incorrect), it fails.
#
# The other responsibilities of NPM are kept -- version checks, build
# steps, postprocessing etc.
export HOME=$TMPDIR
cd "${packageName}"
runHook preRebuild
export HOME=$TMPDIR
cd "${packageName}"
runHook preRebuild
${stdenv.lib.optionalString bypassCache ''
${stdenv.lib.optionalString bypassCache ''
${stdenv.lib.optionalString reconstructLock ''
if [ -f package-lock.json ]
then
echo "WARNING: Reconstruct lock option enabled, but a lock file already exists!"
echo "This will most likely result in version mismatches! We will remove the lock file and regenerate it!"
rm package-lock.json
else
echo "No package-lock.json file found, reconstructing..."
fi
if [ -f package-lock.json ]
then
echo "WARNING: Reconstruct lock option enabled, but a lock file already exists!"
echo "This will most likely result in version mismatches! We will remove the lock file and regenerate it!"
rm package-lock.json
else
echo "No package-lock.json file found, reconstructing..."
fi
node ${reconstructPackageLock}
''}
node ${reconstructPackageLock}
''}
node ${addIntegrityFieldsScript}
''}
npm ${forceOfflineFlag} --nodedir=${nodeSources} ${npmFlags} ${stdenv.lib.optionalString production "--production"} rebuild
npm ${forceOfflineFlag} --nodedir=${nodeSources} ${npmFlags} ${stdenv.lib.optionalString production "--production"} rebuild
if [ "$dontNpmInstall" != "1" ]
then
# NPM tries to download packages even when they already exist if npm-shrinkwrap is used.
rm -f npm-shrinkwrap.json
if [ "$dontNpmInstall" != "1" ]
then
# NPM tries to download packages even when they already exist if npm-shrinkwrap is used.
rm -f npm-shrinkwrap.json
npm ${forceOfflineFlag} --nodedir=${nodeSources} ${npmFlags} ${stdenv.lib.optionalString production "--production"} install
fi
'';
npm ${forceOfflineFlag} --nodedir=${nodeSources} ${npmFlags} ${stdenv.lib.optionalString production "--production"} install
fi
'';
# Builds and composes an NPM package including all its dependencies
buildNodePackage =
{ name
, packageName
, version
, dependencies ? []
, buildInputs ? []
, dependencies ? [ ]
, buildInputs ? [ ]
, production ? true
, npmFlags ? ""
, dontNpmInstall ? false
@@ -392,60 +389,61 @@ let
, buildPhase ? "true"
, ...
}@args:
let
extraArgs = removeAttrs args [ "name" "dependencies" "buildInputs" "dontStrip" "dontNpmInstall" "preRebuild" "unpackPhase" "buildPhase" ];
in
stdenv.mkDerivation (
{
name = "node_${name}-${version}";
buildInputs = [ tarWrapper python nodejs ]
++ stdenv.lib.optional (stdenv.isLinux) utillinux
++ stdenv.lib.optional (stdenv.isDarwin) libtool
++ buildInputs;
let
extraArgs = removeAttrs args [ "name" "dependencies" "buildInputs" "dontStrip" "dontNpmInstall" "preRebuild" "unpackPhase" "buildPhase" ];
in
stdenv.mkDerivation
(
{
name = "node_${name}-${version}";
buildInputs = [ tarWrapper python nodejs ]
++ stdenv.lib.optional (stdenv.isLinux) utillinux
++ stdenv.lib.optional (stdenv.isDarwin) libtool
++ buildInputs;
inherit dontStrip; # Stripping may fail a build for some package deployments
inherit dontNpmInstall preRebuild unpackPhase buildPhase;
inherit dontStrip; # Stripping may fail a build for some package deployments
inherit dontNpmInstall preRebuild unpackPhase buildPhase;
compositionScript = composePackage args;
pinpointDependenciesScript = pinpointDependenciesOfPackage args;
compositionScript = composePackage args;
pinpointDependenciesScript = pinpointDependenciesOfPackage args;
passAsFile = [ "compositionScript" "pinpointDependenciesScript" ];
passAsFile = [ "compositionScript" "pinpointDependenciesScript" ];
installPhase = ''
# Create and enter a root node_modules/ folder
mkdir -p $out/lib/node_modules
cd $out/lib/node_modules
installPhase = ''
# Create and enter a root node_modules/ folder
mkdir -p $out/lib/node_modules
cd $out/lib/node_modules
# Compose the package and all its dependencies
source $compositionScriptPath
# Compose the package and all its dependencies
source $compositionScriptPath
${prepareAndInvokeNPM { inherit packageName bypassCache reconstructLock npmFlags production; }}
${prepareAndInvokeNPM { inherit packageName bypassCache reconstructLock npmFlags production; }}
# Create symlink to the deployed executable folder, if applicable
if [ -d "$out/lib/node_modules/.bin" ]
then
ln -s $out/lib/node_modules/.bin $out/bin
fi
# Create symlink to the deployed executable folder, if applicable
if [ -d "$out/lib/node_modules/.bin" ]
then
ln -s $out/lib/node_modules/.bin $out/bin
fi
# Create symlinks to the deployed manual page folders, if applicable
if [ -d "$out/lib/node_modules/${packageName}/man" ]
then
mkdir -p $out/share
for dir in "$out/lib/node_modules/${packageName}/man/"*
do
mkdir -p $out/share/man/$(basename "$dir")
for page in "$dir"/*
do
ln -s $page $out/share/man/$(basename "$dir")
done
done
fi
# Create symlinks to the deployed manual page folders, if applicable
if [ -d "$out/lib/node_modules/${packageName}/man" ]
then
mkdir -p $out/share
for dir in "$out/lib/node_modules/${packageName}/man/"*
do
mkdir -p $out/share/man/$(basename "$dir")
for page in "$dir"/*
do
ln -s $page $out/share/man/$(basename "$dir")
done
done
fi
# Run post install hook, if provided
runHook postInstall
'';
} // extraArgs
);
# Run post install hook, if provided
runHook postInstall
'';
} // extraArgs
);
# Builds a development shell
buildNodeShell =
@@ -453,8 +451,8 @@ let
, packageName
, version
, src
, dependencies ? []
, buildInputs ? []
, dependencies ? [ ]
, buildInputs ? [ ]
, production ? true
, npmFlags ? ""
, dontNpmInstall ? false
@@ -465,10 +463,10 @@ let
, buildPhase ? "true"
, ...
}@args:
let
extraArgs = removeAttrs args [ "name" "dependencies" "buildInputs" ];
nodeDependencies = stdenv.mkDerivation (
let
extraArgs = removeAttrs args [ "name" "dependencies" "buildInputs" ];
nodeDependencies = stdenv.mkDerivation
(
{
name = "node-dependencies-${name}-${version}";
@@ -495,11 +493,11 @@ let
cp ${src}/package.json .
chmod 644 package.json
${stdenv.lib.optionalString bypassCache ''
if [ -f ${src}/package-lock.json ]
then
cp ${src}/package-lock.json .
fi
''}
if [ -f ${src}/package-lock.json ]
then
cp ${src}/package-lock.json .
fi
''}
# Go to the parent folder to make sure that all packages are pinpointed
cd ..
@@ -516,28 +514,28 @@ let
'';
} // extraArgs
);
in
stdenv.mkDerivation {
name = "node-shell-${name}-${version}";
in
stdenv.mkDerivation {
name = "node-shell-${name}-${version}";
buildInputs = [ python nodejs ] ++ stdenv.lib.optional (stdenv.isLinux) utillinux ++ buildInputs;
buildCommand = ''
mkdir -p $out/bin
cat > $out/bin/shell <<EOF
#! ${stdenv.shell} -e
$shellHook
exec ${stdenv.shell}
EOF
chmod +x $out/bin/shell
'';
buildInputs = [ python nodejs ] ++ stdenv.lib.optional (stdenv.isLinux) utillinux ++ buildInputs;
buildCommand = ''
mkdir -p $out/bin
cat > $out/bin/shell <<EOF
#! ${stdenv.shell} -e
$shellHook
exec ${stdenv.shell}
EOF
chmod +x $out/bin/shell
'';
# Provide the dependencies in a development shell through the NODE_PATH environment variable
inherit nodeDependencies;
shellHook = stdenv.lib.optionalString (dependencies != []) ''
export NODE_PATH=$nodeDependencies/lib/node_modules
export PATH="$nodeDependencies/bin:$PATH"
'';
};
# Provide the dependencies in a development shell through the NODE_PATH environment variable
inherit nodeDependencies;
shellHook = stdenv.lib.optionalString (dependencies != [ ]) ''
export NODE_PATH=$nodeDependencies/lib/node_modules
export PATH="$nodeDependencies/bin:$PATH"
'';
};
in
{
buildNodeSourceDist = stdenv.lib.makeOverridable buildNodeSourceDist;

View file

@@ -1,6 +1,6 @@
# This file has been generated by node2nix 1.7.0. Do not edit!
{ nodeEnv, fetchurl, fetchgit, globalBuildInputs ? [] }:
{ nodeEnv, fetchurl, fetchgit, globalBuildInputs ? [ ] }:
let
sources = {
"@babel/code-frame-7.8.3" = {

View file

@@ -59,7 +59,7 @@
];
}
)
(pass.withExtensions (ext: [ext.pass-otp ext.pass-genphrase]))
(pass.withExtensions (ext: [ ext.pass-otp ext.pass-genphrase ]))
ripgrep
unzip
];

View file

@@ -70,7 +70,13 @@
chromium
citrix_workspace
deluge
firefox
(
firefox.override {
extraNativeMessagingHosts = [
(passff-host.override { pass = (pass.withExtensions (ext: [ ext.pass-otp ext.pass-genphrase ])); })
];
}
)
google-chrome
(gnupg.override { guiSupport = true; })
hledger

Binary file not shown.

View file

@@ -1,4 +1,4 @@
with import <nixpkgs> {};
with import <nixpkgs> { };
{ ... }:
let
launcher = import ../sway/launcher.nix { inherit pkgs stdenv; };
@@ -26,7 +26,7 @@ in
statusCommand = "${pkgs.i3status-rust}/bin/i3status-rs ${status-configuration}";
}
];
floating.criteria = [ { class = "launcher"; } { class = "accentor.Main"; } ];
floating.criteria = [{ class = "launcher"; } { class = "accentor.Main"; }];
fonts = [ "Fira Code Normal 9" ];
menu = "${pkgs.kitty}/bin/kitty --class launcher -e ${launcher}/bin/launcher";
modifier = "Mod4";

View file

@@ -1,4 +1,4 @@
with import <nixpkgs> {};
with import <nixpkgs> { };
{
home-manager.users.charlotte = { pkgs, ... }: {

View file

@@ -1,4 +1,4 @@
with import <nixpkgs> {};
with import <nixpkgs> { };
{ pkgs, ... }:
let
launcher = import ./launcher.nix { inherit pkgs stdenv; };

View file

@@ -7,7 +7,6 @@ let
type = "gem";
version = "4.0.0.rc2";
};
emoji_list = stdenv.mkDerivation {
name = "emoji_list";
buildInputs = [ pkgs.ruby gemoji ];

View file

@@ -48,7 +48,7 @@ pkgs.writeText "configuration.toml" ''
block = "net"
device = "wlp0s20f3"
ssid = true
signal_strength = true
signal_strength = false
speed_up = false
speed_down = false
hide_missing = true

View file

@@ -43,6 +43,7 @@
sessionVariables = {
DEFAULT_USER = "charlotte";
EDITOR = "nvim";
PASSWORD_STORE_DIR = "$HOME/repos/passwords";
};
shellAliases = {
upgrade = "sudo nix-channel --update && sudo nixos-rebuild switch";

View file

@@ -23,7 +23,6 @@ let
# useGoogleTVAddOns = false;
# includeExtras = [ "extras;google;gcm" ];
};
customPlugins.kotlin-vim = pkgs.vimUtils.buildVimPlugin {
name = "kotlin-vim";
src = pkgs.fetchFromGitHub {
@@ -33,7 +32,6 @@ let
sha256 = "1yqzxabhpc4jbdlzhsysp0vi1ayqg0vnpysvx4ynd9961q2fk3sz";
};
};
gradle-fhs-nix = pkgs.writeText "gradle-fhs.nix" ''
{ run }:
let
@@ -48,14 +46,12 @@ let
runScript = "bash -c '''''${run}'''";
}).env
'';
gradle-run-script = pkgs.writeScriptBin "gradle" ''
#!${pkgs.bash}/bin/bash
REPO_ROOT="$(git rev-parse --show-toplevel)"
nix-shell --argstr run "\"$REPO_ROOT/gradlew $@\"" "${gradle-fhs-nix}"
'';
sign-release = pkgs.writeScriptBin "sign-release" ''
#!${pkgs.bash}/bin/bash
@@ -90,7 +86,7 @@ pkgs.mkShell {
\ }
'';
vam.knownPlugins = baseVimConfig.vam.knownPlugins // customPlugins;
vam.pluginDictionaries = (baseVimConfig.vam.pluginDictionaries or []) ++ [
vam.pluginDictionaries = (baseVimConfig.vam.pluginDictionaries or [ ]) ++ [
{
names = [
"kotlin-vim"

View file

@@ -1,5 +1,5 @@
let
pkgs = import <nixpkgs> {};
pkgs = import <nixpkgs> { };
baseVimConfig = import ../programs/neovim/base.nix { inherit pkgs; };
in
pkgs.mkShell {
@@ -21,7 +21,7 @@ pkgs.mkShell {
\ }
'';
vam.knownPlugins = baseVimConfig.vam.knownPlugins;
vam.pluginDictionaries = (baseVimConfig.vam.pluginDictionaries or []) ++ [
vam.pluginDictionaries = (baseVimConfig.vam.pluginDictionaries or [ ]) ++ [
{
names = [
"LanguageClient-neovim"

View file

@@ -1,5 +1,5 @@
let
pkgs = import <nixpkgs> {};
pkgs = import <nixpkgs> { };
baseVimConfig = import ../programs/neovim/base.nix { inherit pkgs; };
nodePackages = import ../packages/node/default.nix { inherit pkgs; };
in
@@ -19,7 +19,7 @@ pkgs.mkShell {
\ }
'';
vam.knownPlugins = baseVimConfig.vam.knownPlugins;
vam.pluginDictionaries = (baseVimConfig.vam.pluginDictionaries or []) ++ [
vam.pluginDictionaries = (baseVimConfig.vam.pluginDictionaries or [ ]) ++ [
{
names = [
"LanguageClient-neovim"

View file

@@ -1,5 +1,5 @@
let
pkgs = import <nixpkgs> {};
pkgs = import <nixpkgs> { };
baseVimConfig = import ../programs/neovim/base.nix { inherit pkgs; };
nodePackages = import ../packages/node/default.nix { inherit pkgs; };
in
@@ -36,7 +36,7 @@ pkgs.mkShell {
\ }
'';
vam.knownPlugins = baseVimConfig.vam.knownPlugins;
vam.pluginDictionaries = (baseVimConfig.vam.pluginDictionaries or []) ++ [
vam.pluginDictionaries = (baseVimConfig.vam.pluginDictionaries or [ ]) ++ [
{
names = [
"LanguageClient-neovim"

View file

@@ -1,8 +1,7 @@
let
pkgs = import <nixpkgs> {};
pkgs = import <nixpkgs> { };
baseVimConfig = import ../programs/neovim/base.nix { inherit pkgs; };
jdtls = import ../packages/jdtls/default.nix { inherit pkgs; stdenv = pkgs.stdenv; };
extraRpath = pkgs.stdenv.lib.strings.makeLibraryPath (with pkgs; [ ffmpeg ]);
in
pkgs.mkShell {
@@ -41,7 +40,7 @@ pkgs.mkShell {
\}
'';
vam.knownPlugins = baseVimConfig.vam.knownPlugins;
vam.pluginDictionaries = (baseVimConfig.vam.pluginDictionaries or []) ++ [
vam.pluginDictionaries = (baseVimConfig.vam.pluginDictionaries or [ ]) ++ [
{
names = [
"LanguageClient-neovim"

View file

@@ -1,5 +1,5 @@
let
pkgs = import <nixpkgs> {};
pkgs = import <nixpkgs> { };
baseVimConfig = import ../programs/neovim/base.nix { inherit pkgs; };
in
pkgs.mkShell {
@@ -9,7 +9,7 @@ pkgs.mkShell {
configure = {
customRC = baseVimConfig.customRC;
vam.knownPlugins = baseVimConfig.vam.knownPlugins;
vam.pluginDictionaries = (baseVimConfig.vam.pluginDictionaries or []) ++ [ { name = "vim-ledger"; } ];
vam.pluginDictionaries = (baseVimConfig.vam.pluginDictionaries or [ ]) ++ [{ name = "vim-ledger"; }];
};
}
)

View file

@@ -1,5 +1,5 @@
let
pkgs = import <nixpkgs> {};
pkgs = import <nixpkgs> { };
in
pkgs.mkShell {
buildInputs = with pkgs; [