Compare commits

1 commit

| Author | SHA1 | Message | Date |
| --- | --- | --- | --- |
| xenia | e98d1584df | wip: create dbzfs module | 2024-04-10 23:48:00 -04:00 |
42 changed files with 51 additions and 2732 deletions

README.md (196 changed lines)
@@ -1,8 +1,3 @@
---
gitea: none
include_toc: true
---
# dragnpkgs
this is my personal nixos modules and packages repository. while it was designed for my own use,
@@ -23,27 +18,6 @@ dragnpkgs provides a set of nixos modules and a nixpkgs overlay containing custo
}
```
alternatively, add it as a nix channel
```bash
nix-channel --add https://git.lain.faith/haskal/dragnpkgs/archive/main.tar.gz dragnpkgs
```
then in system config
```nix
{config, lib, pkgs, ...}:
{
imports = [
<dragnpkgs>
];
}
```
for standalone nix on other distros, use `~/.config/nixpkgs/overlays.nix` to enable the dragnpkgs
overlay
```nix
(import <dragnpkgs> {}).nixpkgs.overlays
```
## options documentation
documentation for options provided by dragnpkgs
@@ -82,10 +56,9 @@ the server hostname or IP; this is typically required (by java RMI) for correct
the server will use 3 consecutive TCP ports starting from this port
#### services.ghidra-server.directory (`ghidra-server`)
#### services.ghidra-server.directory (`/var/lib/ghidra-server`)
the root directory for server files, as a subdirectory of `/var/lib`. this is needed because this
option is passed to systemd `StateDirectory=`
the root directory for server files
#### services.ghidra-server.{user,group} (`ghidra`)
@@ -96,174 +69,11 @@ the service user and group
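for reference, a minimal sketch of enabling the server with the options documented above (the hostname is illustrative; the port and directory show the defaults)
```nix
{config, lib, pkgs, ...}:
{
  services.ghidra-server = {
    enable = true;
    host = "ghidra.example.org";          # hostname or IP reachable by clients (needed for java RMI)
    basePort = 13100;                     # the server also uses the next two consecutive ports
    directory = "/var/lib/ghidra-server";
  };
}
```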
## packages documentation
### [`ghidra_headless`](./default.nix)
### `ghidra_headless`
a variant of ghidra built with a headless openjdk, intended to reduce closure size for server
operation
### [`ghidra`](./pkgs/ghidra-xenia/build.nix)
preview version of ghidra with my nix patches
### [`kicad`](./pkgs/kicad-xenia/default.nix)
preview version of kicad with my patches
### [`ocamlPackages.ppx_unicode`](./pkgs/ocaml/ppx_unicode)
opinionated ppx for string literals: <https://git.lain.faith/haskal/ppx_unicode>
### [`ocamlPackages.xlog`](./pkgs/ocaml/xlog)
logging for cats, in ocaml: <https://git.lain.faith/haskal/xlog>
### [`python312Packages.feedvalidator` or `feedvalidator`](./pkgs/python/feedvalidator)
the W3C atom/RSS feed validator library, <https://github.com/w3c/feedvalidator>
this package comes with an additional CLI bin, `feedvalidator`, which is a simple wrapper around the
library that enables CLI usage
usage
```
usage: feedvalidator [-h] [-b BASE] file
W3C feedvalidator
positional arguments:
file File to validate
options:
-h, --help show this help message and exit
-b BASE, --base BASE Base URL of document
```
example
```bash
feedvalidator --base "https://my-base-url/atom.xml" path/to/atom.xml
```
### [`outer-wilds-text-adventure`](./pkgs/games/outer-wilds-text-adventure)
nix packaging for the Outer Wilds text adventure game. it should work by default on NixOS. if using
the nix package manager on a non-NixOS computer, you also need the following when using pipewire or
another ALSA plugin that lives in a separate package
```bash
export ALSA_PLUGIN_DIR=$(nix eval -f '<nixpkgs>' --raw pipewire)/lib/alsa-lib
```
## lib documentation
### [`fetchFromSteam`](./lib/fetchsteam)
a fetcher that downloads binaries from [Steam](https://store.steampowered.com/) using
[DepotDownloader](https://github.com/SteamRE/DepotDownloader). this is intended for game servers
that are distributed via Steam. use [SteamDB](https://steamdb.info) to get the needed IDs.
Usage:
```nix
pkgs.fetchFromSteam {
name = "..."; # optional
appId = "...";
depot = {
depotId = "...";
manifestId = "...";
beta = "..."; # optional
};
additionalDepots = [
# same format as the main `depot`
# use this to include eg the steamworks redistributable depot
];
hash = pkgs.lib.fakeHash;
}
```
### [`fetchb4`](./lib/fetchb4)
A fetcher that uses `b4` to download patchsets from <https://lore.kernel.org> so that they can be applied in `boot.kernelPatches`
Usage:
```nix
pkgs.fetchb4 {
msgid = "2024042069.1337-example@example";
hash = pkgs.lib.fakeHash;
# optional args
version = "3"; # default: latest
single_message = true; # default: false
}
```
note that not specifying a version may cause future invocations to return different output if a newer version is sent to the thread
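as a sketch, a patch fetched this way can be dropped into `boot.kernelPatches` roughly like this (the name and msgid are illustrative)
```nix
{config, lib, pkgs, ...}:
{
  boot.kernelPatches = [
    {
      name = "example-lore-patchset";    # illustrative name
      patch = pkgs.fetchb4 {
        msgid = "2024042069.1337-example@example";
        version = "3";                   # pin the series version for reproducible output
        hash = pkgs.lib.fakeHash;        # replace with the real hash after the first build
      };
    }
  ];
}
```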
### [`mkNginxServer`](./lib/dev-nginx)
creates a shell script that launches nginx in the foreground as the current user. nginx is
configured to serve http on `localhost:8080` with the given `siteConfig`
example:
```nix
pkgs.mkNginxServer {
siteConfig = ''
location / {
root path/to/development_site_root;
error_page 404 /404.html;
}
'';
}
```
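a possible way to expose this in a project dev shell (a sketch, assuming the dragnpkgs overlay is enabled; `root .` resolves relative to the directory the script is run from)
```nix
pkgs.mkShell {
  packages = [
    (pkgs.mkNginxServer {
      siteConfig = ''
        location / {
          root .;
        }
      '';
    })
  ];
}
```
the generated wrapper is installed as `dev-nginx.sh`; run it from the site root and browse `http://localhost:8080`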
### [`gitSource`](./lib/git-source)
for use in development packages' nix files, computes the set of source files tracked by git at the
given root path
arguments:
- `root`: the root of the git repo, where `.git` is located
- `subdir`, optional: a subdirectory within the git repo. if provided, only files in this
subdirectory will go into the final source set
example:
```nix
stdenv.mkDerivation {
# ...
src = gitSource { root = ./.; };
}
```
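a second sketch showing `subdir` (the `frontend` directory here is hypothetical; note the implementation also excludes `.nix` files from the set)
```nix
stdenv.mkDerivation {
  # ...
  # only git-tracked files under ./frontend end up in the source
  src = gitSource { root = ./.; subdir = ./frontend; };
}
```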
### [`makeSquashFs`](./lib/make-squashfs)
builds a squashfs image from the given derivations
example
```nix
makeSquashFs {
filename = "my-image"; # optional
storeContents = [ foo bar ];
}
```
### [`makeHpcDist`](./lib/make-hpc-dist)
create a packaged nix distribution with the given packages in it for weird HPC systems. go read the
source to find out what it does; i don't recommend using this if you're not me
## development
structure of this repo
- `default.nix`: the top level NixOS module, which can also be interpreted as a plain nix file
outside of NixOS for access to just the nixpkgs overlay. this contains all definitions for
packages, library functions, and NixOS modules
- `lib/`: library functions (ie functions that get added to the overlay) go here
- `modules/`: NixOS modules go here
- `pkgs/`: packages that get added to the overlay go here
- `support/`: WIP support tools (eg generating documentation)
## licensing
this repository is NOT licensed under a "standard" FOSS license. instead, it uses

TODO.md (17 changed lines)
@@ -1,17 +0,0 @@
# TODO
## upstream
- fix ghidra StartupWMClass
- fix kicad desktop file name
## `ghidra-server`
create NixOS VM test (a rough skeleton is sketched after this list)
- test that ghidra starts and the repositories are initialized
- test that ghidra-svrAdmin works as an unprivileged user in the `ghidra` group
- possibly test remotely importing a binary. however, ghidra-svrAdmin working is a good indicator of
the server being functional
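a rough, hypothetical skeleton for such a test (option values and wait conditions are guesses, not taken from the module)
```nix
pkgs.nixosTest {
  name = "ghidra-server";
  nodes.machine = { ... }: {
    imports = [ ./default.nix ];
    services.ghidra-server = {
      enable = true;
      host = "127.0.0.1";
    };
  };
  testScript = ''
    machine.wait_for_unit("multi-user.target")
    # the server listens on basePort (13100 by default) once it is up
    machine.wait_for_open_port(13100)
  '';
}
```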
## general
- meta info / license info for pkgs

@@ -1,48 +1,16 @@
{ ... }:
{ config, lib, pkgs, ... }:
{
imports = [
./modules/dragn-ball-zfs
./modules/ghidra-server
];
nixpkgs.overlays = [
(final: prev: {
fetchFromSteam = prev.callPackage ./lib/fetchsteam {};
fetchb4 = prev.callPackage ./lib/fetchb4 {};
gitSource = prev.callPackage ./lib/git-source {};
makeSquashFs = prev.callPackage ./lib/make-squashfs {};
makeHpcDist = final.callPackage ./lib/make-hpc-dist {};
ghidra_headless = prev.ghidra.override {
openjdk17 = prev.openjdk17_headless;
};
ghidra = final.callPackage ./pkgs/ghidra-xenia/build.nix {
protobuf = final.protobuf_21;
};
ghidra-extensions = final.lib.recurseIntoAttrs (final.callPackage ./pkgs/ghidra-xenia/extensions.nix { });
ghidra-bin = final.callPackage ./pkgs/ghidra-xenia { };
kicad = final.callPackage ./pkgs/kicad-xenia { };
kicadAddons = final.lib.recurseIntoAttrs (final.callPackage ./pkgs/kicad-xenia/addons {});
ocamlPackages = prev.ocamlPackages.overrideScope (ofinal: oprev: {
ppx_unicode = ofinal.callPackage ./pkgs/ocaml/ppx_unicode {};
xlog = ofinal.callPackage ./pkgs/ocaml/xlog {};
});
python312Packages = prev.python312Packages.overrideScope (pfinal: pprev: {
feedvalidator = pfinal.callPackage ./pkgs/python/feedvalidator {};
});
# add to top level because it has a binary
feedvalidator = final.python312Packages.feedvalidator;
outer-wilds-text-adventure = prev.callPackage ./pkgs/games/outer-wilds-text-adventure {};
mkNginxServer = prev.callPackage ./lib/dev-nginx {};
})
];
}

@@ -1,46 +0,0 @@
{
writeText,
writeShellScriptBin,
nginx,
systemd
}:
{ siteConfig }:
let conf = writeText "nginx.conf" ''
daemon off;
events {}
pid /tmp/nginx.pid;
http {
access_log /dev/stdout;
client_body_temp_path /tmp;
proxy_temp_path /tmp;
fastcgi_temp_path /tmp;
uwsgi_temp_path /tmp;
scgi_temp_path /tmp;
include ${nginx}/conf/mime.types;
default_type application/octet-stream;
sendfile on;
types_hash_max_size 4096;
types_hash_bucket_size 128;
server {
server_name localhost;
listen 127.0.0.1:8080;
gzip on;
gzip_min_length 256;
gzip_proxied expired no-cache no-store private auth;
gzip_types text/plain application/xml image/svg+xml text/css text/javascript;
${siteConfig}
}
}
'';
in
writeShellScriptBin "dev-nginx.sh" ''
exec ${systemd}/bin/systemd-run --user -t -pPrivateTmp=true --working-directory="$PWD" ${nginx}/bin/nginx -p "$PWD" -e stderr -c ${conf}
''

@@ -1,6 +0,0 @@
if [ -e "$NIX_ATTRS_SH_FILE" ]; then . "$NIX_ATTRS_SH_FILE"; elif [ -f .attrs.sh ]; then . .attrs.sh; fi
source $stdenv/setup
echo "Downloading kernel patch $msgid into $out"
export HOME="$TMP"
PYTHONHASHSEED=0 b4 -n am -C -T $b4_flags -o- "$msgid" > "$out"

@@ -1,23 +0,0 @@
{ lib, stdenvNoCC, b4, git, cacert }:
{
msgid,
hash,
single_message ? false,
version ? null
}: stdenvNoCC.mkDerivation {
name = "patch-${msgid}";
builder = ./builder.sh;
inherit msgid;
b4_flags = with lib.strings; concatStringsSep " " [
(optionalString single_message "--single-message")
(optionalString (version != null) "--use-version ${version}")
];
nativeBuildInputs = [ b4 git cacert ];
SSL_CERT_FILE = "${cacert}/etc/ssl/certs/ca-bundle.crt";
outputHash = hash;
preferLocalBuild = true;
}

@@ -1,13 +0,0 @@
if [ -e "$NIX_ATTRS_SH_FILE" ]; then . "$NIX_ATTRS_SH_FILE"; elif [ -f .attrs.sh ]; then . .attrs.sh; fi
source $stdenv/setup
echo "Downloading Steam depots for appId $appId into $out"
export HOME="$PWD"
mkdir -p "$out"
for args in "${depotArgs[@]}"; do
echo "Downloading component: $args"
DepotDownloader $args -dir "$out" -validate
done
rm -rf "$out/.DepotDownloader"

@@ -1,50 +0,0 @@
{ lib, stdenvNoCC, system, cacert, depotdownloader }:
let
checkDepot = depot: with builtins;
hasAttr "depotId" depot && hasAttr "manifestId" depot;
depotFormat = "{ depotId = ...; manifestId = ...; [beta = ...;] }";
in lib.makeOverridable (
{
name ? null,
appId,
depot,
additionalDepots ? [],
hash
}:
if ! checkDepot depot then
throw "Invalid format for depot: must be ${depotFormat}"
else if ! builtins.all checkDepot additionalDepots then
throw "Invalid format for additionalDepots: must be ${depotFormat}"
else
let
depotOs =
if system == "x86_64-linux" then
"linux"
else
throw "fetchFromSteam does not currently support systems other than x86_64-linux";
makeDepotArg = depot:
"-app ${appId} -depot ${depot.depotId} -manifest ${depot.manifestId} -os ${depotOs}"
+ (lib.optionalString (builtins.hasAttr "beta" depot) " -beta ${depot.beta}");
depotArgs = builtins.map makeDepotArg ([ depot ] ++ additionalDepots);
in stdenvNoCC.mkDerivation {
name = "steam-depot-${appId}" + (lib.optionalString (name != null) "-${name}");
builder = ./builder.sh;
inherit appId;
inherit depotArgs;
nativeBuildInputs = [
depotdownloader
cacert
];
SSL_CERT_FILE = "${cacert}/etc/ssl/certs/ca-bundle.crt";
# impureEnvVars are not handled here
# if used in an environment with eg, an http proxy, consult DepotDownloader and/or .NET for how to
# configure that sort of thing
outputHashMode = "recursive";
outputHash = hash;
preferLocalBuild = true;
})

@@ -1,14 +0,0 @@
{ lib }: { root, subdir ? null }:
let
fs = lib.fileset;
sourceFiles = fs.difference
(fs.gitTracked root)
(fs.fileFilter (file: file.hasExt "nix") root);
finalSourceFiles =
if subdir == null then
sourceFiles
else
fs.intersection sourceFiles subdir;
finalRoot = if subdir == null then root else subdir;
in
fs.toSource { root = finalRoot; fileset = finalSourceFiles; }

@@ -1,119 +0,0 @@
{
mkShell,
runCommand,
stdenvNoCC,
vmTools,
writeClosure,
writeText,
bash,
cacert,
coreutils,
lix,
singularity,
makeSquashFs,
diskSize ? 1024,
memSize ? 1024
}:
{
contents,
startupScript ? "exec ${bash}/bin/bash -i"
}:
let
base-container = runCommand "empty.sif.d" {
buildInputs = [ coreutils ];
} ''
mkdir "$out"
cd "$out"
mkdir -p proc sys dev nix etc bin usr/bin .singularity.d
ln -s /etc/sh bin/sh
ln -s /etc/env usr/bin/env
ln -s /etc/runscript .singularity.d/runscript
'';
container-image = vmTools.runInLinuxVM (
runCommand "singularity-empty-image" {
buildInputs = [ base-container singularity ];
} ''
export HOME=/tmp
cp -r "${base-container}" "/tmp/container"
cd "/tmp"
find container -type d -exec chmod 755 {} \;
mkdir -p /var/lib/singularity/mnt/session
echo "root:x:0:0:System administrator:/root:/bin/sh" > /etc/passwd
echo > /etc/resolv.conf
${singularity}/bin/singularity build "$out/empty.sif" "container/"
'');
deps = [ coreutils bash cacert ];
startupScriptFile = writeText "singularity-startup-script" startupScript;
shell = stdenvNoCC.mkDerivation {
name = "shell";
propagatedBuildInputs = deps ++ contents;
unpackPhase = "true";
installPhase = ''
mkdir -p "$out/bin"
printf '#!${bash}/bin/bash\n' > "$out/bin/startup.sh"
export >> "$out/bin/startup.sh"
cat "${startupScriptFile}" >> "$out/bin/startup.sh"
chmod +x "$out/bin/startup.sh"
'';
};
base-etc = runCommand "singularity-etc" {
buildInputs = [ coreutils bash cacert ];
} ''
mkdir "$out"
ln -s "${shell}/bin/startup.sh" "$out/runscript"
ln -s "${bash}/bin/bash" "$out/sh"
ln -s "${coreutils}/bin/env" "$out/env"
mkdir -p "$out/ssl/certs"
ln -s "${cacert}/etc/ssl/certs/ca-bundle.crt" "$out/ssl/certs/ca-bundle.crt"
ln -s "${cacert}/etc/ssl/certs/ca-bundle.crt" "$out/ssl/certs/ca-certificates.crt"
touch "$out/localtime"
touch "$out/resolv.conf"
'';
squashfs = makeSquashFs { filename = "nix-store"; storeContents = [ shell ]; };
startCommand = writeText "run-container.sh" ''
#!/usr/bin/env bash
set -euo pipefail
module load singularity/3.10.3
temp_dir="$(mktemp -d)"
mkdir -p "$TMPDIR/empty"
function __cleanup {
echo cleaning up
rsync -r --delete -- "$TMPDIR/empty/." "$temp_dir/."
rmdir "$temp_dir"
echo done
}
trap __cleanup EXIT
cp -r etc nix-store.squashfs "$temp_dir"
chmod +w "$temp_dir/etc"
chmod +w "$temp_dir/etc/resolv.conf"
chmod +w "$temp_dir/etc/localtime"
cat /etc/localtime > $temp_dir/etc/localtime
cat /etc/resolv.conf > $temp_dir/etc/resolv.conf
singularity run -B "/work:/work,/scratch:/scratch,$temp_dir/nix-store.squashfs:/nix/store:image-src=/,$temp_dir/etc:/etc" --pid --uts --ipc container-base.sif
'';
in runCommand "hpc-files.d" {} ''
mkdir "$out"
cp "${squashfs}" "$out/nix-store.squashfs"
cp -r "${base-etc}" "$out/etc"
cp "${container-image}/empty.sif" "$out/container-base.sif"
cp "${startCommand}" "$out/run-container.sh"
chmod +x "$out/run-container.sh"
''

@@ -1,23 +0,0 @@
{
squashfsTools,
closureInfo,
runCommand
}:
{
filename ? "image",
storeContents ? [],
comp ? "xz -Xdict-size 100%"
}:
let
compFlag = if comp == null then "-no-compression" else "-comp ${comp}";
in runCommand "${filename}.squashfs" {
nativeBuildInputs = [ squashfsTools ];
} ''
closureInfo=${closureInfo { rootPaths = storeContents; }}
cp $closureInfo/registration nix-path-registration
mksquashfs nix-path-registration $(cat $closureInfo/store-paths) $out \
-no-hardlinks -keep-as-directory -all-root -b 1048576 ${compFlag} \
-processors $NIX_BUILD_CORES
''

@@ -0,0 +1,36 @@
{ config, lib, pkgs, ... }:
with lib;
let
cfg = config.dbzfs;
in {
options.dbzfs = {
enable = mkEnableOption "dbzfs" // {
default = true;
};
package = mkPackageOption pkgs "zfs" {};
managedRoot = mkOption {
default = null;
example = literalExpression "\"rpool/data\"";
description = mdDoc "Root dataset which is managed by dbzfs.";
type = types.str;
};
datasets = mkOption {
default = [];
example = literalExpression "TODO";
description = mdDoc "List of datasets to set up with dbzfs";
type = types.listOf types.anything; # todo
};
};
config = mkIf cfg.enable {
assertions = [{
assertion = cfg.managedRoot != null;
message = "dbzfs is enabled but dbzfs.managedRoot is not defined!";
}];
system.activationScripts.dbzfs = ''
echo meow
'';
};
}
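a minimal usage sketch of this WIP module, based only on the options defined above (the `datasets` format is still marked todo)
```nix
{
  dbzfs = {
    enable = true;               # defaults to true once the module is imported
    managedRoot = "rpool/data";  # required by the assertion above
    datasets = [ ];              # schema not settled yet
  };
}
```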

@@ -8,7 +8,7 @@
}:
let
server_conf = writeText "server.conf" "ghidra.repositories.dir=/var/lib/${directory}/repositories";
server_conf = writeText "server.conf" "ghidra.repositories.dir=${directory}/repositories";
in writeShellScriptBin "ghidra-svrAdmin" ''
exec ${jdkPackage}/bin/java \
-cp ${package}/lib/ghidra/Ghidra/Framework/Utility/lib/Utility.jar \

@@ -20,33 +20,31 @@ in {
};
basePort = mkOption {
default = 13100;
description = "Ghidra server base port - the server will use 3 consecutive TCP ports starting from the provided port number.";
description = mdDoc "Ghidra server base port - the server will use 3 consecutive TCP ports starting from the provided port number.";
type = types.port;
};
directory = mkOption {
default = "ghidra-server";
description = ''
Directory for Ghidra server data, under `/var/lib` (for systemd `StateDirectory`)
'';
default = "/var/lib/ghidra-server";
description = mdDoc "Directory for Ghidra server data.";
type = types.str;
};
user = mkOption {
type = types.str;
default = "ghidra";
description = "User account under which ghidra server runs.";
description = mdDoc "User account under which ghidra server runs.";
};
group = mkOption {
type = types.str;
default = "ghidra";
description = "Group account under which ghidra server runs.";
description = mdDoc "Group account under which ghidra server runs.";
};
};
config = mkIf cfg.enable {
users.users."${cfg.user}" = {
isSystemUser = true;
home = "/var/lib/${cfg.directory}";
inherit (cfg) group;
home = cfg.directory;
group = cfg.group;
packages = [ cfg.package cfg.jdkPackage ];
};
@@ -63,13 +61,13 @@ in {
paths = map head (filter isList inputSplit);
in ghidra_home + (concatStringsSep (":" + ghidra_home) paths);
ghidra_mainclass = "ghidra.server.remote.GhidraServer";
ghidra_args = "-a0 -u -p${toString cfg.basePort} -ip ${cfg.host} /var/lib/${cfg.directory}/repositories";
ghidra_args = "-a0 -u -p${toString cfg.basePort} -ip ${cfg.host} ${cfg.directory}/repositories";
in {
description = "Ghidra server";
after = ["network.target"];
serviceConfig = {
ExecStart = "${cfg.jdkPackage}/bin/java ${ghidra_java_opt} -classpath ${ghidra_classpath} ${ghidra_mainclass} ${ghidra_args}";
WorkingDirectory = "/var/lib/${cfg.directory}";
WorkingDirectory = cfg.directory;
Environment = "GHIDRA_HOME=${ghidra_home}";
User = cfg.user;
Group = cfg.group;

@@ -1,61 +0,0 @@
{
lib,
fetchzip,
stdenvNoCC,
bash,
jdk11,
jogl
}:
let
jdk = jdk11;
joglJarFiles = [
"gluegen-rt-natives-linux-amd64.jar"
"gluegen-rt.jar"
"jogl-all-natives-linux-amd64.jar"
"jogl-all.jar"
"nativewindow-awt.jar"
"nativewindow-natives-linux-amd64.jar"
"nativewindow-os-drm.jar"
"nativewindow-os-x11.jar"
"nativewindow.jar"
];
joglJars = lib.strings.concatMapStringsSep ":" (f: "${jogl}/share/java/${f}") joglJarFiles;
in stdenvNoCC.mkDerivation rec {
pname = "outer-wilds-text-adventure";
version = "1.0";
src = fetchzip {
url = "https://www.mobiusdigitalgames.com/uploads/4/7/3/2/47328935/outerwildstextadventure.application.windows64.zip";
hash = "sha256-DZWjAQmraphpBQEKzMWa327DWA3bc8fiSocHe4hF06k=";
};
propagatedNativeBuildInputs = [ jdk jogl ];
installPhase = ''
mkdir -p $out
cp -r data $out/data
mkdir -p $out/share
cp -r source $out/share
mkdir -p $out/share/doc
cp SomeContextForTheThingYouJustDownloaded.txt $out/share/doc/README.txt
mkdir -p $out/lib
for file in core.jar jl1.0.1.jar jsminim.jar minim.jar mp3spi1.9.5.jar OuterWilds_TextAdventure.jar tritonus_aos.jar tritonus_share.jar; do
cp "lib/$file" $out/lib
done
mkdir -p $out/bin
cat > $out/bin/outer-wilds-text-adventure <<EOF
#!${bash}/bin/bash
cd $out
exec ${jdk}/bin/java -Djna.nosys=true -Djava.library.path=$out/lib -cp "$out/lib/OuterWilds_TextAdventure.jar:$out/lib/core.jar:$out/lib/jl1.0.1.jar:$out/lib/jsminim.jar:$out/lib/minim.jar:$out/lib/mp3spi1.9.5.jar:$out/lib/tritonus_aos.jar:$out/lib/tritonus_share.jar:${joglJars}" OuterWilds_TextAdventure
EOF
chmod +x $out/bin/outer-wilds-text-adventure
'';
meta = with lib; {
description = "Outer Wilds: A Thrilling Graphical Text Adventure";
homepage = "https://www.mobiusdigitalgames.com/outer-wilds-text-adventure.html";
license = licenses.unfree;
platforms = [ "x86_64-linux" ];
};
}

@@ -1,74 +0,0 @@
{
lib,
stdenv,
fetchFromSteam,
SDL2
}:
let
appId = "1690800";
buildId = "15636842";
steamworks_sdk = fetchFromSteam {
name = "steamworks-sdk";
inherit appId;
depot = {
depotId = "1006";
manifestId = "7138471031118904166";
};
hash = "sha256-OtPI1kAx6+9G09IEr2kYchyvxlPl3rzx/ai/xEVG4oM=";
};
server_dist = fetchFromSteam {
name = "satisfactory-dedicated-server";
inherit appId;
depot = {
depotId = "1690802";
manifestId = "1910179703516567959";
};
hash = "sha256-TxPegZFAwiAzuHgw9xLGr5sAP7KAVMMfPFYL7TRX1O0=";
};
in stdenv.mkDerivation {
pname = "satisfactory-dedicated-server";
version = "build-${buildId}";
src = server_dist;
buildInputs = [ steamworks_sdk ];
propagatedBuildInputs = [ SDL2 ];
dontConfigure = true;
dontBuild = true;
installPhase = ''
mkdir -p $out
cp -r . $out/.
cp -r ${steamworks_sdk}/linux64 $out
mkdir -p $out/FactoryGame/Intermediate
mkdir -p $out/FactoryGame/Saved
rm $out/FactoryServer.sh
'';
dontStrip = true;
dontPatchELF = true;
dontPatchShebangs = true;
dontPruneLibtoolFiles = true;
preFixup = ''
echo patching binaries
chmod +x $out/Engine/Binaries/Linux/FactoryServer-Linux-Shipping
patchelf --add-needed ${SDL2}/lib/libSDL2-2.0.so.0 \
$out/linux64/steamclient.so
patchelf --set-interpreter "$(cat $NIX_CC/nix-support/dynamic-linker)" \
--add-needed $out/linux64/steamclient.so \
$out/Engine/Binaries/Linux/FactoryServer-Linux-Shipping
'';
meta = with lib; {
description = "Satisfactory Dedicated Server";
license = licenses.unfree;
platforms = [ "x86_64-linux" ];
};
}

@@ -1 +0,0 @@
/nix/store/j0r1vyd1hd43rjzaday70wny2lhjkc1p-satisfactory-dedicated-server-build-15636842

@@ -1,214 +0,0 @@
From ffb6777d58f068db7e14372415154cd93f77766e Mon Sep 17 00:00:00 2001
From: roblabla <unfiltered@roblab.la>
Date: Wed, 31 Jan 2024 13:19:55 +0100
Subject: [PATCH] Use com.google.protobuf:protobuf-gradle-plugin
---
Ghidra/Debug/Debugger-gadp/build.gradle | 7 +-
Ghidra/Debug/Debugger-isf/build.gradle | 8 +-
Ghidra/Debug/Debugger-rmi-trace/build.gradle | 14 +--
build.gradle | 6 ++
gradle/debugger/hasProtobuf.gradle | 94 --------------------
5 files changed, 26 insertions(+), 103 deletions(-)
diff --git a/Ghidra/Debug/Debugger-gadp/build.gradle b/Ghidra/Debug/Debugger-gadp/build.gradle
index 9e1c57faf..3a3242eb5 100644
--- a/Ghidra/Debug/Debugger-gadp/build.gradle
+++ b/Ghidra/Debug/Debugger-gadp/build.gradle
@@ -18,11 +18,16 @@ apply from: "${rootProject.projectDir}/gradle/javaProject.gradle"
apply from: "${rootProject.projectDir}/gradle/jacocoProject.gradle"
apply from: "${rootProject.projectDir}/gradle/javaTestProject.gradle"
apply from: "${rootProject.projectDir}/gradle/distributableGhidraModule.gradle"
-apply from: "${rootProject.projectDir}/gradle/debugger/hasProtobuf.gradle"
+apply plugin: 'com.google.protobuf'
apply plugin: 'eclipse'
eclipse.project.name = 'Debug Debugger-gadp'
+buildscript {
+ dependencies {
+ classpath 'com.google.protobuf:protobuf-gradle-plugin:0.8.18'
+ }
+}
dependencies {
api project(':Framework-AsyncComm')
api project(':Framework-Debugging')
diff --git a/Ghidra/Debug/Debugger-isf/build.gradle b/Ghidra/Debug/Debugger-isf/build.gradle
index d135294a0..785681ca2 100644
--- a/Ghidra/Debug/Debugger-isf/build.gradle
+++ b/Ghidra/Debug/Debugger-isf/build.gradle
@@ -18,11 +18,15 @@ apply from: "${rootProject.projectDir}/gradle/javaProject.gradle"
apply from: "${rootProject.projectDir}/gradle/jacocoProject.gradle"
apply from: "${rootProject.projectDir}/gradle/javaTestProject.gradle"
apply from: "${rootProject.projectDir}/gradle/distributableGhidraModule.gradle"
-apply from: "${rootProject.projectDir}/gradle/debugger/hasProtobuf.gradle"
-
+apply plugin: 'com.google.protobuf'
apply plugin: 'eclipse'
eclipse.project.name = 'Debug Debugger-isf'
+buildscript {
+ dependencies {
+ classpath 'com.google.protobuf:protobuf-gradle-plugin:0.8.18'
+ }
+}
dependencies {
api project(':Framework-AsyncComm')
api project(':Framework-Debugging')
diff --git a/Ghidra/Debug/Debugger-rmi-trace/build.gradle b/Ghidra/Debug/Debugger-rmi-trace/build.gradle
index 40fbc17ab..7517ffe6e 100644
--- a/Ghidra/Debug/Debugger-rmi-trace/build.gradle
+++ b/Ghidra/Debug/Debugger-rmi-trace/build.gradle
@@ -18,12 +18,17 @@ apply from: "${rootProject.projectDir}/gradle/javaProject.gradle"
apply from: "${rootProject.projectDir}/gradle/jacocoProject.gradle"
apply from: "${rootProject.projectDir}/gradle/javaTestProject.gradle"
apply from: "${rootProject.projectDir}/gradle/distributableGhidraModule.gradle"
-apply from: "${rootProject.projectDir}/gradle/debugger/hasProtobuf.gradle"
+apply plugin: 'com.google.protobuf'
apply from: "${rootProject.projectDir}/gradle/debugger/hasPythonPackage.gradle"
apply plugin: 'eclipse'
eclipse.project.name = 'Debug Debugger-rmi-trace'
+buildscript {
+ dependencies {
+ classpath 'com.google.protobuf:protobuf-gradle-plugin:0.8.18'
+ }
+}
dependencies {
api project(':Pty')
api project(':Debugger')
@@ -44,12 +49,9 @@ task generateProtoPy {
ext.outdir = file("build/generated/source/proto/main/py")
outputs.dir(outdir)
inputs.files(src)
- dependsOn(configurations.protocArtifact)
+ dependsOn(protobuf.generateProtoTasks.all())
doLast {
- def exe = configurations.protocArtifact.first()
- if (!isCurrentWindows()) {
- exe.setExecutable(true)
- }
+ def exe = protobuf.tools.protoc.path
exec {
commandLine exe, "--python_out=$outdir", "-I$srcdir"
args src
diff --git a/build.gradle b/build.gradle
index b0c717fb1..5f56506a5 100644
--- a/build.gradle
+++ b/build.gradle
@@ -74,6 +74,12 @@ if (flatRepo.isDirectory()) {
jcenter()
flatDir name: "flat", dirs:["$flatRepo"]
}
+ buildscript {
+ repositories {
+ mavenLocal()
+ mavenCentral()
+ }
+ }
}
}
else {
diff --git a/gradle/debugger/hasProtobuf.gradle b/gradle/debugger/hasProtobuf.gradle
index 23b4ce74b..e69de29bb 100644
--- a/gradle/debugger/hasProtobuf.gradle
+++ b/gradle/debugger/hasProtobuf.gradle
@@ -1,94 +0,0 @@
-/* ###
- * IP: GHIDRA
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-/*plugins {
- id 'com.google.protobuf' version '0.8.10'
-}*/
-
-configurations {
- allProtocArtifacts
- protocArtifact
-}
-
-def platform = getCurrentPlatformName()
-
-
-dependencies {
- allProtocArtifacts 'com.google.protobuf:protoc:3.21.8:windows-x86_64@exe'
- allProtocArtifacts 'com.google.protobuf:protoc:3.21.8:linux-x86_64@exe'
- allProtocArtifacts 'com.google.protobuf:protoc:3.21.8:linux-aarch_64@exe'
- allProtocArtifacts 'com.google.protobuf:protoc:3.21.8:osx-x86_64@exe'
- allProtocArtifacts 'com.google.protobuf:protoc:3.21.8:osx-aarch_64@exe'
-
- if (isCurrentWindows()) {
- protocArtifact 'com.google.protobuf:protoc:3.21.8:windows-x86_64@exe'
- }
- if (isCurrentLinux()) {
- if (platform.endsWith("x86_64")) {
- protocArtifact 'com.google.protobuf:protoc:3.21.8:linux-x86_64@exe'
- }
- else {
- protocArtifact 'com.google.protobuf:protoc:3.21.8:linux-aarch_64@exe'
- }
- }
- if (isCurrentMac()) {
- if (platform.endsWith("x86_64")) {
- protocArtifact 'com.google.protobuf:protoc:3.21.8:osx-x86_64@exe'
- }
- else {
- protocArtifact 'com.google.protobuf:protoc:3.21.8:osx-aarch_64@exe'
- }
- }
-}
-
-/*protobuf {
- protoc {
- artifact = 'com.google.protobuf:protoc:3.21.8'
- }
-}*/
-
-task generateProto {
- ext.srcdir = file("src/main/proto")
- ext.src = fileTree(srcdir) {
- include "**/*.proto"
- }
- ext.outdir = file("build/generated/source/proto/main/java")
- outputs.dir(outdir)
- inputs.files(src)
- dependsOn(configurations.protocArtifact)
- doLast {
- def exe = configurations.protocArtifact.first()
- if (!isCurrentWindows()) {
- exe.setExecutable(true)
- }
- exec {
- commandLine exe, "--java_out=$outdir", "-I$srcdir"
- args src
- }
- }
-}
-
-tasks.compileJava.dependsOn(tasks.generateProto)
-tasks.eclipse.dependsOn(tasks.generateProto)
-rootProject.tasks.prepDev.dependsOn(tasks.generateProto)
-
-sourceSets {
- main {
- java {
- srcDir tasks.generateProto.outdir
- }
- }
-}
-zipSourceSubproject.dependsOn generateProto
--
2.42.0

@@ -1,15 +0,0 @@
diff --git a/Ghidra/Framework/Utility/src/main/java/utility/application/ApplicationUtilities.java b/Ghidra/Framework/Utility/src/main/java/utility/application/ApplicationUtilities.java
index ea12a661f0..da7779b07f 100644
--- a/Ghidra/Framework/Utility/src/main/java/utility/application/ApplicationUtilities.java
+++ b/Ghidra/Framework/Utility/src/main/java/utility/application/ApplicationUtilities.java
@@ -36,6 +36,10 @@ public class ApplicationUtilities {
*/
public static Collection<ResourceFile> findDefaultApplicationRootDirs() {
Collection<ResourceFile> applicationRootDirs = new ArrayList<>();
+ String nixGhidraHome = System.getenv("NIX_GHIDRAHOME");
+ if (nixGhidraHome != null) {
+ applicationRootDirs.add(new ResourceFile(nixGhidraHome));
+ };
ResourceFile applicationRootDir = findPrimaryApplicationRootDir();
if (applicationRootDir != null) {
applicationRootDirs.add(applicationRootDir);

@@ -1,26 +0,0 @@
diff --git a/Ghidra/RuntimeScripts/Common/support/buildExtension.gradle b/Ghidra/RuntimeScripts/Common/support/buildExtension.gradle
index bc194f219..94b00fabd 100644
--- a/Ghidra/RuntimeScripts/Common/support/buildExtension.gradle
+++ b/Ghidra/RuntimeScripts/Common/support/buildExtension.gradle
@@ -82,7 +82,7 @@ dependencies {
helpPath fileTree(dir: ghidraDir + '/Features/Base', include: "**/Base.jar")
}
-def ZIP_NAME_PREFIX = "${DISTRO_PREFIX}_${RELEASE_NAME}_${getCurrentDate()}"
+def ZIP_NAME_PREFIX = "${DISTRO_PREFIX}_${RELEASE_NAME}"
def DISTRIBUTION_DIR = file("dist")
def pathInZip = "${project.name}"
diff --git a/gradle/root/distribution.gradle b/gradle/root/distribution.gradle
index f44c8267b..f6231c417 100644
--- a/gradle/root/distribution.gradle
+++ b/gradle/root/distribution.gradle
@@ -32,7 +32,7 @@ apply from: "$rootProject.projectDir/gradle/support/sbom.gradle"
def currentPlatform = getCurrentPlatformName()
def PROJECT_DIR = file (rootProject.projectDir.absolutePath)
ext.DISTRIBUTION_DIR = file("$buildDir/dist")
-ext.ZIP_NAME_PREFIX = "${rootProject.DISTRO_PREFIX}_${rootProject.BUILD_DATE_SHORT}"
+ext.ZIP_NAME_PREFIX = "${rootProject.DISTRO_PREFIX}"
ext.ZIP_DIR_PREFIX = "${rootProject.DISTRO_PREFIX}"
ext.ALL_REPOS = [rootProject.file('.').getName()]

@@ -1,78 +0,0 @@
{ lib
, stdenv
, unzip
, jdk
, gradle
, ghidra
}:
let
metaCommon = oldMeta:
oldMeta // (with lib; {
maintainers = (oldMeta.maintainers or []) ++ (with maintainers; [ vringar ]);
platforms = oldMeta.platforms or ghidra.meta.platforms;
});
buildGhidraExtension = {
pname, nativeBuildInputs ? [], meta ? { }, ...
}@args:
stdenv.mkDerivation (args // {
nativeBuildInputs = nativeBuildInputs ++ [
unzip
jdk
gradle
];
buildPhase = args.buildPhase or ''
runHook preBuild
# Set project name, otherwise defaults to directory name
echo -e '\nrootProject.name = "${pname}"' >> settings.gradle
export GRADLE_USER_HOME=$(mktemp -d)
gradle \
--offline \
--no-daemon \
-PGHIDRA_INSTALL_DIR=${ghidra}/lib/ghidra
runHook postBuild
'';
installPhase = args.installPhase or ''
runHook preInstall
mkdir -p $out/lib/ghidra/Ghidra/Extensions
unzip -d $out/lib/ghidra/Ghidra/Extensions dist/*.zip
runHook postInstall
'';
meta = metaCommon meta;
});
buildGhidraScripts = { pname, meta ? { }, ... }@args:
stdenv.mkDerivation (args // {
installPhase = ''
runHook preInstall
GHIDRA_HOME=$out/lib/ghidra/Ghidra/Extensions/${pname}
mkdir -p $GHIDRA_HOME
cp -r . $GHIDRA_HOME/ghidra_scripts
touch $GHIDRA_HOME/Module.manifest
cat <<'EOF' > extension.properties
name=${pname}
description=${meta.description or ""}
author=
createdOn=
version=${lib.getVersion ghidra}
EOF
runHook postInstall
'';
meta = metaCommon meta;
});
in
{ inherit buildGhidraExtension buildGhidraScripts; }

@@ -1,266 +0,0 @@
{
stdenv,
fetchFromGitHub,
lib,
callPackage,
gradle_7,
perl,
makeBinaryWrapper,
openjdk17,
unzip,
makeDesktopItem,
copyDesktopItems,
desktopToDarwinBundle,
xcbuild,
protobuf,
ghidra-extensions,
python3,
python3Packages,
}:
let
pkg_path = "$out/lib/ghidra";
pname = "ghidra";
version = "11.1.1";
releaseName = "NIX";
distroPrefix = "ghidra_${version}_${releaseName}";
src = fetchFromGitHub {
owner = "NationalSecurityAgency";
repo = "Ghidra";
rev = "Ghidra_${version}_build";
hash = "sha256-t96FcAK3JwO66dOf4OhpOfU8CQfAczfF61Cg7m+B3fA=";
# populate values that require us to use git. By doing this in postFetch we
# can delete .git afterwards and maintain better reproducibility of the src.
leaveDotGit = true;
postFetch = ''
cd "$out"
git rev-parse HEAD > $out/COMMIT
# 1970-Jan-01
date -u -d "@$(git log -1 --pretty=%ct)" "+%Y-%b-%d" > $out/SOURCE_DATE_EPOCH
# 19700101
date -u -d "@$(git log -1 --pretty=%ct)" "+%Y%m%d" > $out/SOURCE_DATE_EPOCH_SHORT
find "$out" -name .git -print0 | xargs -0 rm -rf
'';
};
gradle = gradle_7;
patches = [
# Use our own protoc binary instead of the prebuilt one
./0001-Use-protobuf-gradle-plugin.patch
# Override installation directory to allow loading extensions
./0002-Load-nix-extensions.patch
# Remove build dates from output filenames for easier reference
./0003-Remove-build-datestamp.patch
];
postPatch = ''
# Set name of release (eg. PUBLIC, DEV, etc.)
sed -i -e 's/application\.release\.name=.*/application.release.name=${releaseName}/' Ghidra/application.properties
# Set build date and git revision
echo "application.build.date=$(cat SOURCE_DATE_EPOCH)" >> Ghidra/application.properties
echo "application.build.date.short=$(cat SOURCE_DATE_EPOCH_SHORT)" >> Ghidra/application.properties
echo "application.revision.ghidra=$(cat COMMIT)" >> Ghidra/application.properties
# Tells ghidra to use our own protoc binary instead of the prebuilt one.
cat >>Ghidra/Debug/Debugger-gadp/build.gradle <<HERE
protobuf {
protoc {
path = '${protobuf}/bin/protoc'
}
}
HERE
'';
# Adds a gradle step that downloads all the dependencies to the gradle cache.
addResolveStep = ''
cat >>build.gradle <<HERE
task resolveDependencies {
doLast {
project.rootProject.allprojects.each { subProject ->
subProject.buildscript.configurations.each { configuration ->
resolveConfiguration(subProject, configuration, "buildscript config \''${configuration.name}")
}
subProject.configurations.each { configuration ->
resolveConfiguration(subProject, configuration, "config \''${configuration.name}")
}
}
}
}
void resolveConfiguration(subProject, configuration, name) {
if (configuration.canBeResolved) {
logger.info("Resolving project {} {}", subProject.name, name)
configuration.resolve()
}
}
HERE
'';
# fake build to pre-download deps into fixed-output derivation
# Taken from mindustry derivation.
deps = stdenv.mkDerivation {
pname = "${pname}-deps";
inherit version src patches;
postPatch = addResolveStep;
nativeBuildInputs = [
gradle
perl
] ++ lib.optional stdenv.isDarwin xcbuild;
buildPhase = ''
runHook preBuild
export HOME="$NIX_BUILD_TOP/home"
mkdir -p "$HOME"
export JAVA_TOOL_OPTIONS="-Duser.home='$HOME'"
export GRADLE_USER_HOME="$HOME/.gradle"
# First, fetch the static dependencies.
gradle --no-daemon --info -Dorg.gradle.java.home=${openjdk17} -I gradle/support/fetchDependencies.gradle init
# Then, fetch the maven dependencies.
gradle --no-daemon --info -Dorg.gradle.java.home=${openjdk17} resolveDependencies
runHook postBuild
'';
# perl code mavenizes paths (com.squareup.okio/okio/1.13.0/a9283170b7305c8d92d25aff02a6ab7e45d06cbe/okio-1.13.0.jar -> com/squareup/okio/okio/1.13.0/okio-1.13.0.jar)
installPhase = ''
runHook preInstall
find $GRADLE_USER_HOME/caches/modules-2 -type f -regex '.*\.\(jar\|pom\)' \
| perl -pe 's#(.*/([^/]+)/([^/]+)/([^/]+)/[0-9a-f]{30,40}/([^/\s]+))$# ($x = $2) =~ tr|\.|/|; "install -Dm444 $1 \$out/maven/$x/$3/$4/$5" #e' \
| sh
cp -r dependencies $out/dependencies
runHook postInstall
'';
outputHashAlgo = "sha256";
outputHashMode = "recursive";
outputHash = "sha256-66gL4UFlBUo2JIEOXoF6tFvXtBdEX4b2MeSrV1b6Vg4=";
};
in
stdenv.mkDerivation (finalAttrs: {
inherit
pname
version
src
patches
postPatch
;
# Don't create .orig files if the patch isn't an exact match.
patchFlags = [
"--no-backup-if-mismatch"
"-p1"
];
desktopItems = [
(makeDesktopItem {
name = "ghidra";
exec = "ghidra";
icon = "ghidra";
desktopName = "Ghidra";
genericName = "Ghidra Software Reverse Engineering Suite";
categories = [ "Development" ];
terminal = false;
startupWMClass = "ghidra-Ghidra";
})
];
nativeBuildInputs =
[
gradle
unzip
makeBinaryWrapper
copyDesktopItems
protobuf
python3
python3Packages.pip
]
++ lib.optionals stdenv.isDarwin [
xcbuild
desktopToDarwinBundle
];
dontStrip = true;
__darwinAllowLocalNetworking = true;
buildPhase = ''
runHook preBuild
export HOME="$NIX_BUILD_TOP/home"
mkdir -p "$HOME"
export JAVA_TOOL_OPTIONS="-Duser.home='$HOME'"
ln -s ${deps}/dependencies dependencies
sed -i "s#mavenLocal()#mavenLocal(); maven { url '${deps}/maven' }#g" build.gradle
gradle --offline --no-daemon --info -Dorg.gradle.java.home=${openjdk17} buildGhidra
runHook postBuild
'';
installPhase = ''
runHook preInstall
mkdir -p "${pkg_path}" "$out/share/applications"
ZIP=build/dist/$(ls build/dist)
echo $ZIP
unzip $ZIP -d ${pkg_path}
f=("${pkg_path}"/*)
mv "${pkg_path}"/*/* "${pkg_path}"
rmdir "''${f[@]}"
for f in Ghidra/Framework/Gui/src/main/resources/images/GhidraIcon*.png; do
res=$(basename "$f" ".png" | cut -d"_" -f3 | cut -c11-)
install -Dm444 "$f" "$out/share/icons/hicolor/''${res}x''${res}/apps/ghidra.png"
done;
# improved macOS icon support
install -Dm444 Ghidra/Framework/Gui/src/main/resources/images/GhidraIcon64.png $out/share/icons/hicolor/32x32@2/apps/ghidra.png
runHook postInstall
'';
postFixup = ''
mkdir -p "$out/bin"
ln -s "${pkg_path}/ghidraRun" "$out/bin/ghidra"
wrapProgram "${pkg_path}/support/launch.sh" \
--set-default NIX_GHIDRAHOME "${pkg_path}/Ghidra" \
--prefix PATH : ${lib.makeBinPath [ openjdk17 ]}
'';
passthru = {
inherit releaseName distroPrefix;
inherit (ghidra-extensions.override { ghidra = finalAttrs.finalPackage; })
buildGhidraExtension
buildGhidraScripts
;
withExtensions = callPackage ./with-extensions.nix { ghidra = finalAttrs.finalPackage; };
};
meta = with lib; {
changelog = "https://htmlpreview.github.io/?https://github.com/NationalSecurityAgency/ghidra/blob/Ghidra_${finalAttrs.version}_build/Ghidra/Configurations/Public_Release/src/global/docs/ChangeHistory.html";
description = "Software reverse engineering (SRE) suite of tools";
mainProgram = "ghidra";
homepage = "https://ghidra-sre.org/";
platforms = [
"x86_64-linux"
"aarch64-linux"
"x86_64-darwin"
"aarch64-darwin"
];
sourceProvenance = with sourceTypes; [
fromSource
binaryBytecode # deps
];
license = licenses.asl20;
maintainers = with maintainers; [
roblabla
vringar
];
broken = stdenv.isDarwin && stdenv.isx86_64;
};
})

@@ -1,83 +0,0 @@
{ stdenv
, fetchzip
, lib
, makeWrapper
, autoPatchelfHook
, openjdk17
, pam
, makeDesktopItem
, icoutils
}:
let
pkg_path = "$out/lib/ghidra";
desktopItem = makeDesktopItem {
name = "ghidra";
exec = "ghidra";
icon = "ghidra";
desktopName = "Ghidra";
genericName = "Ghidra Software Reverse Engineering Suite";
categories = [ "Development" ];
terminal = false;
startupWMClass = "ghidra-Ghidra";
};
in stdenv.mkDerivation rec {
pname = "ghidra";
version = "10.4";
versiondate = "20230928";
src = fetchzip {
url = "https://github.com/NationalSecurityAgency/ghidra/releases/download/Ghidra_${version}_build/ghidra_${version}_PUBLIC_${versiondate}.zip";
hash = "sha256-IiAQ9OKmr8ZgqmGftuW0ITdG06fb9Lr30n2H9GArctk=";
};
nativeBuildInputs = [
makeWrapper
icoutils
]
++ lib.optionals stdenv.isLinux [ autoPatchelfHook ];
buildInputs = [
stdenv.cc.cc.lib
pam
];
dontStrip = true;
installPhase = ''
mkdir -p "${pkg_path}"
mkdir -p "${pkg_path}" "$out/share/applications"
cp -a * "${pkg_path}"
ln -s ${desktopItem}/share/applications/* $out/share/applications
icotool -x "${pkg_path}/support/ghidra.ico"
rm ghidra_4_40x40x32.png
for f in ghidra_*.png; do
res=$(basename "$f" ".png" | cut -d"_" -f3 | cut -d"x" -f1-2)
mkdir -pv "$out/share/icons/hicolor/$res/apps"
mv "$f" "$out/share/icons/hicolor/$res/apps/ghidra.png"
done;
'';
postFixup = ''
mkdir -p "$out/bin"
ln -s "${pkg_path}/ghidraRun" "$out/bin/ghidra"
wrapProgram "${pkg_path}/support/launch.sh" \
--prefix PATH : ${lib.makeBinPath [ openjdk17 ]}
'';
meta = with lib; {
description = "Software reverse engineering (SRE) suite of tools developed by NSA's Research Directorate in support of the Cybersecurity mission";
mainProgram = "ghidra";
homepage = "https://github.com/NationalSecurityAgency/ghidra";
platforms = [ "x86_64-linux" "x86_64-darwin" ];
sourceProvenance = with sourceTypes; [ binaryBytecode ];
license = licenses.asl20;
maintainers = with maintainers; [ ck3d govanify mic92 ];
};
}

@@ -1,14 +0,0 @@
{ lib, newScope, callPackage, ghidra }:
lib.makeScope newScope (self: {
inherit (callPackage ./build-extension.nix { inherit ghidra; }) buildGhidraExtension buildGhidraScripts;
ghidraninja-ghidra-scripts = self.callPackage ./extensions/ghidraninja-ghidra-scripts { };
gnudisassembler = self.callPackage ./extensions/gnudisassembler { inherit ghidra; };
machinelearning = self.callPackage ./extensions/machinelearning { inherit ghidra; };
sleighdevtools = self.callPackage ./extensions/sleighdevtools { inherit ghidra; };
})

@@ -1,36 +0,0 @@
{ lib
, fetchFromGitHub
, buildGhidraScripts
, binwalk
, swift
, yara
}:
buildGhidraScripts {
pname = "ghidraninja-ghidra-scripts";
version = "unstable-2020-10-07";
src = fetchFromGitHub {
owner = "ghidraninja";
repo = "ghidra_scripts";
rev = "99f2a8644a29479618f51e2d4e28f10ba5e9ac48";
sha256 = "aElx0mp66/OHQRfXwTkqdLL0gT2T/yL00bOobYleME8=";
};
postPatch = ''
# Replace subprocesses with store versions
substituteInPlace binwalk.py --replace-fail 'subprocess.call(["binwalk"' 'subprocess.call(["${binwalk}/bin/binwalk"'
substituteInPlace swift_demangler.py --replace-fail '"swift"' '"${swift}/bin/swift"'
substituteInPlace yara.py --replace-fail 'subprocess.check_output(["yara"' 'subprocess.check_output(["${yara}/bin/yara"'
substituteInPlace YaraSearch.py --replace-fail '"yara "' '"${yara}/bin/yara "'
'';
meta = with lib; {
description = "Scripts for the Ghidra software reverse engineering suite";
homepage = "https://github.com/ghidraninja/ghidra_scripts";
license = with licenses; [
gpl3Only
gpl2Only
];
};
}

@@ -1,71 +0,0 @@
{ lib
, stdenv
, fetchurl
, buildGhidraExtension
, ghidra
, flex
, bison
, texinfo
, perl
, zlib
, xcbuild
}:
let
# Incorporates source from binutils
# https://github.com/NationalSecurityAgency/ghidra/blob/7ab9bf6abffb6938d61d072040fc34ad3331332b/GPL/GnuDisassembler/build.gradle#L34-L35
binutils-version = "2.41";
binutils-src = fetchurl {
url = "mirror://gnu/binutils/binutils-${binutils-version}.tar.bz2";
sha256 = "sha256-pMS+wFL3uDcAJOYDieGUN38/SLVmGEGOpRBn9nqqsws=";
};
in
buildGhidraExtension {
pname = "gnudisassembler";
version = lib.getVersion ghidra;
src = "${ghidra}/lib/ghidra/Extensions/Ghidra/${ghidra.distroPrefix}_GnuDisassembler.zip";
postPatch = ''
ln -s ${binutils-src} binutils-${binutils-version}.tar.bz2
'';
# Don't modify ELF stub resources
dontPatchELF = true;
dontStrip = true;
__darwinAllowLocalNetworking = true;
nativeBuildInputs = [
flex
bison
texinfo
perl
] ++ lib.optionals stdenv.hostPlatform.isDarwin [
xcbuild
];
buildInputs = [
zlib
];
installPhase = ''
runHook preInstall
EXTENSIONS_ROOT=$out/lib/ghidra/Ghidra/Extensions
mkdir -p $EXTENSIONS_ROOT
unzip -d $EXTENSIONS_ROOT $src
mkdir -p $EXTENSIONS_ROOT/GnuDisassembler/build
cp -r build/os $EXTENSIONS_ROOT/GnuDisassembler/build/
runHook postInstall
'';
meta = with lib; {
description = "Leverage the binutils disassembler capabilities for various processors";
homepage = "https://ghidra-sre.org/";
downloadPage = "https://github.com/NationalSecurityAgency/ghidra/tree/master/GPL/GnuDisassembler";
license = licenses.gpl2Only;
};
}

@@ -1,34 +0,0 @@
{ lib
, buildGhidraExtension
, ghidra
}:
buildGhidraExtension {
pname = "machinelearning";
version = lib.getVersion ghidra;
src = "${ghidra}/lib/ghidra/Extensions/Ghidra/${ghidra.distroPrefix}_MachineLearning.zip";
dontUnpack = true;
# Built as part of ghidra
dontBuild = true;
installPhase = ''
runHook preInstall
mkdir -p $out/lib/ghidra/Ghidra/Extensions
unzip -d $out/lib/ghidra/Ghidra/Extensions $src
runHook postInstall
'';
meta = with lib; {
inherit (ghidra.meta) homepage license;
description = "Finds functions using ML";
downloadPage = "https://github.com/NationalSecurityAgency/ghidra/tree/master/Ghidra/Extensions/MachineLearning";
sourceProvenance = with sourceTypes; [
fromSource
binaryBytecode # deps
];
};
}

@@ -1,40 +0,0 @@
{ lib
, buildGhidraExtension
, ghidra
, python3
}:
buildGhidraExtension {
pname = "sleighdevtools";
version = lib.getVersion ghidra;
src = "${ghidra}/lib/ghidra/Extensions/Ghidra/${ghidra.distroPrefix}_SleighDevTools.zip";
dontUnpack = true;
# Built as part of ghidra
dontBuild = true;
buildInputs = [ python3 ];
installPhase = ''
runHook preInstall
mkdir -p $out/lib/ghidra/Ghidra/Extensions
unzip -d $out/lib/ghidra/Ghidra/Extensions $src
runHook postInstall
'';
meta = with lib; {
inherit (ghidra.meta) homepage license;
description = "Sleigh language development tools including external disassembler capabilities";
longDescription = ''
Sleigh language development tools including external disassembler capabilities.
The GnuDisassembler extension may also be required as a disassembly provider.
'';
downloadPage = "https://github.com/NationalSecurityAgency/ghidra/tree/master/Ghidra/Extensions/SleighDevTools";
sourceProvenance = with sourceTypes; [
fromSource
binaryBytecode # deps
];
};
}

@@ -1,36 +0,0 @@
{ lib
, stdenv
, callPackage
, symlinkJoin
, makeBinaryWrapper
, desktopToDarwinBundle
, ghidra
}:
let
ghidra-extensions = callPackage ./extensions.nix { inherit ghidra; };
allExtensions = lib.filterAttrs (n: pkg: lib.isDerivation pkg) ghidra-extensions;
/* Make Ghidra with additional extensions
Example:
pkgs.ghidra.withExtensions (p: with p; [
ghostrings
]);
=> /nix/store/3yn0rbnz5mbrxf0x70jbjq73wgkszr5c-ghidra-with-extensions-10.2.2
*/
withExtensions = f: (symlinkJoin {
name = "${ghidra.pname}-with-extensions-${lib.getVersion ghidra}";
paths = (f allExtensions);
nativeBuildInputs = [ makeBinaryWrapper ]
++ lib.optional stdenv.hostPlatform.isDarwin desktopToDarwinBundle;
postBuild = ''
makeWrapper '${ghidra}/bin/ghidra' "$out/bin/ghidra" \
--set NIX_GHIDRAHOME "$out/lib/ghidra/Ghidra"
ln -s ${ghidra}/share $out/share
'' + lib.optionalString stdenv.hostPlatform.isDarwin ''
convertDesktopFiles $prefix
'';
inherit (ghidra) meta;
});
in
withExtensions

@@ -1,5 +0,0 @@
{ kicad }:
{
kikit = kicad.callPackage ./kikit.nix { addonName = "kikit"; };
kikit-library = kicad.callPackage ./kikit.nix { addonName = "kikit-library"; };
}

@@ -1,52 +0,0 @@
# For building the multiple addons that are in the kikit repo.
{ stdenv
, bc
, kikit
, zip
, python3
, addonName
, addonPath
}:
let
# This python is only used when building the package, it's not the python
# environment that will ultimately run the code packaged here. The python env defined
# in KiCad will import the python code packaged here when KiCad starts up.
python = python3.withPackages (ps: with ps; [ click ]);
kikit-module = python3.pkgs.toPythonModule (kikit.override { inherit python3; });
# The following different addons can be built from the same source.
targetSpecs = {
"kikit" = {
makeTarget = "pcm-kikit";
resultZip = "pcm-kikit.zip";
description = "KiCad plugin and a CLI tool to automate several tasks in a standard KiCad workflow";
};
"kikit-library" = {
makeTarget = "pcm-lib";
resultZip = "pcm-kikit-lib.zip";
description = "KiKit uses these symbols and footprints to annotate your boards (e.g., to place a tab in a panel).";
};
};
targetSpec = targetSpecs.${addonName};
in
stdenv.mkDerivation {
name = "kicadaddon-${addonName}";
inherit (kikit-module) src version;
nativeBuildInputs = [ python bc zip ];
propagatedBuildInputs = [ kikit-module ];
buildPhase = ''
patchShebangs scripts/setJson.py
make ${targetSpec.makeTarget}
'';
installPhase = ''
mkdir $out
mv build/${targetSpec.resultZip} $out/${addonPath}
'';
meta = kikit-module.meta // {
description = targetSpec.description;
};
}

@@ -1,211 +0,0 @@
{ lib
, stdenv
, cmake
, libGLU
, libGL
, zlib
, wxGTK
, gtk3
, libX11
, gettext
, glew
, glm
, cairo
, curl
, openssl
, boost
, pkg-config
, doxygen
, graphviz
, pcre
, libpthreadstubs
, libXdmcp
, unixODBC
, libgit2
, libsecret
, libgcrypt
, libgpg-error
, util-linux
, libselinux
, libsepol
, libthai
, libdatrie
, libxkbcommon
, libepoxy
, dbus
, at-spi2-core
, libXtst
, pcre2
, libdeflate
, swig4
, python
, wxPython
, opencascade-occt_7_6
, libngspice
, valgrind
, stable
, testing
, baseName
, kicadSrc
, kicadVersion
, withNgspice
, withScripting
, withI18n
, debug
, sanitizeAddress
, sanitizeThreads
}:
assert lib.assertMsg (!(sanitizeAddress && sanitizeThreads))
"'sanitizeAddress' and 'sanitizeThreads' are mutually exclusive, use one.";
assert testing -> !stable
-> throw "testing implies stable and cannot be used with stable = false";
let
opencascade-occt = opencascade-occt_7_6;
inherit (lib) optional optionals optionalString;
in
stdenv.mkDerivation rec {
pname = "kicad-base";
version = if (stable) then kicadVersion else builtins.substring 0 10 src.rev;
src = kicadSrc;
patches = [
# upstream issue 12941 (attempted to upstream, but appreciably unacceptable)
./writable.patch
# https://gitlab.com/kicad/code/kicad/-/issues/15687
./runtime_stock_data_path.patch
];
# tagged releases don't have "unknown"
# kicad testing and nightlies use git describe --dirty
# nix removes .git, so its approximated here
postPatch = lib.optionalString (!stable || testing) ''
substituteInPlace cmake/KiCadVersion.cmake \
--replace "unknown" "${builtins.substring 0 10 src.rev}"
substituteInPlace cmake/CreateGitVersionHeader.cmake \
--replace "0000000000000000000000000000000000000000" "${src.rev}"
'';
makeFlags = optionals (debug) [ "CFLAGS+=-Og" "CFLAGS+=-ggdb" ];
cmakeFlags = [
"-DKICAD_USE_EGL=ON"
"-DOCC_INCLUDE_DIR=${opencascade-occt}/include/opencascade"
# https://gitlab.com/kicad/code/kicad/-/issues/17133
"-DCMAKE_CTEST_ARGUMENTS='--exclude-regex;qa_spice'"
]
++ optional (stdenv.hostPlatform.system == "aarch64-linux")
"-DCMAKE_CTEST_ARGUMENTS=--exclude-regex;'qa_spice|qa_cli'"
++ optional (stable && !withNgspice) "-DKICAD_SPICE=OFF"
++ optionals (!withScripting) [
"-DKICAD_SCRIPTING_WXPYTHON=OFF"
]
++ optionals (withI18n) [
"-DKICAD_BUILD_I18N=ON"
]
++ optionals (!doInstallCheck) [
"-DKICAD_BUILD_QA_TESTS=OFF"
]
++ optionals (debug) [
"-DKICAD_STDLIB_DEBUG=ON"
"-DKICAD_USE_VALGRIND=ON"
]
++ optionals (sanitizeAddress) [
"-DKICAD_SANITIZE_ADDRESS=ON"
]
++ optionals (sanitizeThreads) [
"-DKICAD_SANITIZE_THREADS=ON"
];
cmakeBuildType = if debug then "Debug" else "Release";
nativeBuildInputs = [
cmake
doxygen
graphviz
pkg-config
libgit2
libsecret
libgcrypt
libgpg-error
]
# wanted by configuration on linux, doesn't seem to affect performance
# no effect on closure size
++ optionals (stdenv.isLinux) [
util-linux
libselinux
libsepol
libthai
libdatrie
libxkbcommon
libepoxy
dbus
at-spi2-core
libXtst
pcre2
];
buildInputs = [
libGLU
libGL
zlib
libX11
wxGTK
gtk3
pcre
libXdmcp
gettext
glew
glm
libpthreadstubs
cairo
curl
openssl
boost
swig4
python
unixODBC
libdeflate
opencascade-occt
]
++ optional (withScripting) wxPython
++ optional (withNgspice) libngspice
++ optional (debug) valgrind;
# some ngspice tests attempt to write to $HOME/.cache/
# this could be and was resolved with XDG_CACHE_HOME = "$TMP";
# but failing tests still attempt to create $HOME
# and the newer CLI tests seem to also use $HOME...
HOME = "$TMP";
# debug builds fail all but the python test
doInstallCheck = !(debug);
installCheckTarget = "test";
nativeInstallCheckInputs = [
(python.withPackages(ps: with ps; [
numpy
pytest
cairosvg
pytest-image-diff
]))
];
dontStrip = debug;
meta = {
description = "Just the built source without the libraries";
longDescription = ''
Just the build products, the libraries are passed via an env var in the wrapper, default.nix
'';
homepage = "https://www.kicad.org/";
license = lib.licenses.gpl3Plus;
platforms = lib.platforms.all;
};
}

@@ -1,298 +0,0 @@
{ lib, stdenv
, runCommand
, newScope
, fetchFromGitLab
, fetchgit
, makeWrapper
, symlinkJoin
, callPackage
, callPackages
, gnome
, dconf
, gtk3
, wxGTK32
, librsvg
, cups
, gsettings-desktop-schemas
, hicolor-icon-theme
, unzip
, jq
, pname ? "kicad"
, stable ? true
, testing ? false
, withNgspice ? !stdenv.isDarwin
, libngspice
, withScripting ? true
, python3
, addons ? [ ]
, debug ? false
, sanitizeAddress ? false
, sanitizeThreads ? false
, with3d ? true
, withI18n ? true
, srcs ? { }
}:
# `addons`: https://dev-docs.kicad.org/en/addons/
#
# ```nix
# kicad = pkgs.kicad.override {
# addons = with pkgs.kicadAddons; [ kikit kikit-library ];
# };
# ```
# The `srcs` parameter can be used to override the kicad source code
# and all libraries, which are otherwise inaccessible
# to overlays since most of the kicad build expression has been
# refactored into base.nix, most of the library build expressions have
# been refactored into libraries.nix. Overrides are only applied when
# building `kicad-unstable`. The `srcs` parameter has
# no effect for stable `kicad`. `srcs` takes an attribute set in which
# any of the following attributes are meaningful (though none are
# mandatory): "kicad", "kicadVersion", "symbols", "templates",
# "footprints", "packages3d", and "libVersion". "kicadVersion" and
# "libVersion" should be set to a string with the desired value for
# the version attribute in kicad's `mkDerivation` and the version
# attribute in any of the library's `mkDerivation`, respectively.
# "kicad", "symbols", "templates", "footprints", and "packages3d"
# should be set to an appropriate fetcher (e.g. `fetchFromGitLab`).
# So, for example, a possible overlay for kicad is:
#
# final: prev:
# {
# kicad-unstable = (prev.kicad-unstable.override {
# srcs = {
# kicadVersion = "2020-10-08";
# kicad = prev.fetchFromGitLab {
# group = "kicad";
# owner = "code";
# repo = "kicad";
# rev = "fd22fe8e374ce71d57e9f683ba996651aa69fa4e";
# sha256 = "sha256-F8qugru/jU3DgZSpQXQhRGNFSk0ybFRkpyWb7HAGBdc=";
# };
# };
# });
# }
let
baseName = if (testing) then "kicad-testing"
else if (stable) then "kicad"
else "kicad-unstable";
versionsImport = import ./versions.nix;
# versions.nix does not provide us with version, src and rev. We
# need to turn this into appropriate fetcher calls.
#kicadSrcFetch = fetchFromGitLab {
# group = "kicad";
# owner = "code";
# repo = "kicad";
# rev = versionsImport.${baseName}.kicadVersion.src.rev;
# sha256 = versionsImport.${baseName}.kicadVersion.src.sha256;
#};
kicadSrcFetch = fetchgit {
url = "https://git.lain.faith/haskal/kicad.git";
rev = versionsImport.${baseName}.kicadVersion.src.rev;
sha256 = versionsImport.${baseName}.kicadVersion.src.sha256;
};
libSrcFetch = name: fetchFromGitLab {
group = "kicad";
owner = "libraries";
repo = "kicad-${name}";
rev = versionsImport.${baseName}.libVersion.libSources.${name}.rev;
sha256 = versionsImport.${baseName}.libVersion.libSources.${name}.sha256;
};
# only override `src` or `version` if building `kicad-unstable` with
# the appropriate attribute defined in `srcs`.
srcOverridep = attr: (!stable && builtins.hasAttr attr srcs);
# use default source and version (as defined in versions.nix) by
# default, or use the appropriate attribute from `srcs` if building
# unstable with `srcs` properly defined.
kicadSrc =
if srcOverridep "kicad" then srcs.kicad
else kicadSrcFetch;
kicadVersion =
if srcOverridep "kicadVersion" then srcs.kicadVersion
else versionsImport.${baseName}.kicadVersion.version;
libSrc = name: if srcOverridep name then srcs.${name} else libSrcFetch name;
# TODO does it make sense to only have one version for all libs?
libVersion =
if srcOverridep "libVersion" then srcs.libVersion
else versionsImport.${baseName}.libVersion.version;
wxGTK = wxGTK32;
python = python3;
wxPython = python.pkgs.wxpython;
addonPath = "addon.zip";
addonsDrvs = map (pkg: pkg.override { inherit addonPath python3; }) addons;
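# unpack every addon zip and lay it out as share/kicad/scripting/<dir>/<identifier>,
# so the joined tree can be merged into the stock data path further below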
addonsJoined =
runCommand "addonsJoined"
{
inherit addonsDrvs;
nativeBuildInputs = [ unzip jq ];
} ''
mkdir $out
for pkg in $addonsDrvs; do
unzip $pkg/addon.zip -d unpacked
folder_name=$(jq .identifier unpacked/metadata.json --raw-output | tr . _)
for d in unpacked/*; do
if [ -d "$d" ]; then
dest=$out/share/kicad/scripting/$(basename $d)/$folder_name
mkdir -p $(dirname $dest)
mv $d $dest
fi
done
rm -r unpacked
done
'';
inherit (lib) concatStringsSep flatten optionalString optionals;
in
stdenv.mkDerivation rec {
# Common libraries, referenced during runtime, via the wrapper.
passthru.libraries = callPackages ./libraries.nix { inherit libSrc; };
passthru.callPackage = newScope { inherit addonPath python3; };
base = callPackage ./base.nix {
inherit stable testing baseName;
inherit kicadSrc kicadVersion;
inherit wxGTK python wxPython;
inherit withNgspice withScripting withI18n;
inherit debug sanitizeAddress sanitizeThreads;
};
inherit pname;
version = if (stable) then kicadVersion else builtins.substring 0 10 src.src.rev;
src = base;
dontUnpack = true;
dontConfigure = true;
dontBuild = true;
dontFixup = true;
pythonPath = optionals (withScripting)
[ wxPython python.pkgs.six python.pkgs.requests ] ++ addonsDrvs;
nativeBuildInputs = [ makeWrapper ]
++ optionals (withScripting)
[ python.pkgs.wrapPython ];
# KICAD8_TEMPLATE_DIR only works with a single path (it does not handle : separated paths)
# but it's used to find both the templates and the symbol/footprint library tables
# https://gitlab.com/kicad/code/kicad/-/issues/14792
template_dir = symlinkJoin {
name = "KiCad_template_dir";
paths = with passthru.libraries; [
"${templates}/share/kicad/template"
"${footprints}/share/kicad/template"
"${symbols}/share/kicad/template"
];
};
# We are emulating wrapGAppsHook3 here, and also passing some other variables to the wrapper
makeWrapperArgs = with passthru.libraries; [
"--prefix XDG_DATA_DIRS : ${base}/share"
"--prefix XDG_DATA_DIRS : ${hicolor-icon-theme}/share"
"--prefix XDG_DATA_DIRS : ${gnome.adwaita-icon-theme}/share"
"--prefix XDG_DATA_DIRS : ${gtk3}/share/gsettings-schemas/${gtk3.name}"
"--prefix XDG_DATA_DIRS : ${gsettings-desktop-schemas}/share/gsettings-schemas/${gsettings-desktop-schemas.name}"
# wrapGAppsHook3 did these two as well, no idea if it matters...
"--prefix XDG_DATA_DIRS : ${cups}/share"
"--prefix GIO_EXTRA_MODULES : ${dconf}/lib/gio/modules"
# required to open a bug report link in firefox-wayland
"--set-default MOZ_DBUS_REMOTE 1"
"--set-default KICAD8_FOOTPRINT_DIR ${footprints}/share/kicad/footprints"
"--set-default KICAD8_SYMBOL_DIR ${symbols}/share/kicad/symbols"
"--set-default KICAD8_TEMPLATE_DIR ${template_dir}"
]
++ optionals (addons != [ ]) (
let stockDataPath = symlinkJoin {
name = "kicad_stock_data_path";
paths = [
"${base}/share/kicad"
"${addonsJoined}/share/kicad"
];
};
in
[ "--set-default NIX_KICAD8_STOCK_DATA_PATH ${stockDataPath}" ]
)
++ optionals (with3d)
[
"--set-default KICAD8_3DMODEL_DIR ${packages3d}/share/kicad/3dmodels"
]
++ optionals (withNgspice) [ "--prefix LD_LIBRARY_PATH : ${libngspice}/lib" ]
# infinisil's workaround for #39493
++ [ "--set GDK_PIXBUF_MODULE_FILE ${librsvg}/lib/gdk-pixbuf-2.0/2.10.0/loaders.cache" ]
;
# why does $makeWrapperArgs have to be added explicitly?
# $out and $program_PYTHONPATH don't exist when makeWrapperArgs gets set?
installPhase =
let
bin = if stdenv.isDarwin then "*.app/Contents/MacOS" else "bin";
tools = [ "kicad" "pcbnew" "eeschema" "gerbview" "pcb_calculator" "pl_editor" "bitmap2component" ];
utils = [ "dxf2idf" "idf2vrml" "idfcyl" "idfrect" "kicad-cli" ];
in
(concatStringsSep "\n"
(flatten [
"runHook preInstall"
(optionalString (withScripting) "buildPythonPath \"${base} $pythonPath\" \n")
# wrap each of the directly usable tools
(map
(tool: "makeWrapper ${base}/${bin}/${tool} $out/bin/${tool} $makeWrapperArgs"
+ optionalString (withScripting) " --set PYTHONPATH \"$program_PYTHONPATH\""
)
tools)
# link in the CLI utils
(map (util: "ln -s ${base}/${bin}/${util} $out/bin/${util}") utils)
"runHook postInstall"
])
)
;
postInstall = ''
mkdir -p $out/share
ln -s ${base}/share/applications $out/share/applications
ln -s ${base}/share/icons $out/share/icons
ln -s ${base}/share/mime $out/share/mime
ln -s ${base}/share/metainfo $out/share/metainfo
'';
passthru.updateScript = {
command = [ ./update.sh "${pname}" ];
supportedFeatures = [ "commit" ];
};
meta = rec {
description = (if (stable)
then "Open Source Electronics Design Automation suite"
else if (testing) then "Open Source EDA suite, latest on stable branch"
else "Open Source EDA suite, latest on master branch")
+ (lib.optionalString (!with3d) ", without 3D models");
homepage = "https://www.kicad.org/";
longDescription = ''
KiCad is an open source software suite for Electronic Design Automation.
The programs handle schematic capture and PCB layout with Gerber output.
'';
license = lib.licenses.gpl3Plus;
maintainers = with lib.maintainers; [ evils ];
platforms = lib.platforms.all;
broken = stdenv.isDarwin;
mainProgram = "kicad";
};
}

View File

@@ -1,39 +0,0 @@
{ lib, stdenv
, cmake
, gettext
, libSrc
, stepreduce
, parallel
, zip
}:
let
mkLib = name:
stdenv.mkDerivation {
pname = "kicad-${name}";
version = builtins.substring 0 10 (libSrc name).rev;
src = libSrc name;
nativeBuildInputs = [ cmake ]
++ lib.optionals (name == "packages3d") [
stepreduce
parallel
zip
];
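# shrink the 3D model library: each STEP file is reduced with stepreduce,
# re-packed as a zip-compressed .stpZ, and the original is removed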
postInstall = lib.optionalString (name == "packages3d") ''
find $out -type f -name '*.step' | parallel 'stepreduce {} {} && zip -9 {.}.stpZ {} && rm {}'
'';
meta = rec {
license = lib.licenses.cc-by-sa-40;
platforms = lib.platforms.all;
};
};
in
{
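# these four derivations are exposed on the main kicad package as
# `passthru.libraries`, so they can be pulled out individually, e.g.
# (a sketch): inherit (pkgs.kicad.passthru.libraries) symbols footprints;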
symbols = mkLib "symbols";
templates = mkLib "templates";
footprints = mkLib "footprints";
packages3d = mkLib "packages3d";
}

View File

@@ -1,15 +0,0 @@
diff --git a/common/paths.cpp b/common/paths.cpp
index a74cdd9..790cc58 100644
--- a/common/paths.cpp
+++ b/common/paths.cpp
@@ -151,6 +151,10 @@ wxString PATHS::GetStockDataPath( bool aRespectRunFromBuildDir )
{
wxString path;
+ if( wxGetEnv( wxT( "NIX_KICAD8_STOCK_DATA_PATH" ), &path ) ) {
+ return path;
+ }
+
if( aRespectRunFromBuildDir && wxGetEnv( wxT( "KICAD_RUN_FROM_BUILD_DIR" ), nullptr ) )
{
// Allow debugging from build dir by placing relevant files/folders in the build root

View File

@@ -1,260 +0,0 @@
#!/usr/bin/env nix-shell
#!nix-shell -i bash -p coreutils git nix curl jq
# shellcheck shell=bash enable=all
set -e
shopt -s inherit_errexit
# this script will generate versions.nix in the right location
# this should contain the versions' revs and hashes
# the stable revs are stored only for ease of skipping
# by default nix-prefetch-url uses XDG_RUNTIME_DIR as tmp
# which is /run/user/1000, which defaults to 10% of your RAM
# unless you have over 64GB of ram that'll be insufficient
# resulting in "tar: no space left on device" for packages3d
# hence:
export TMPDIR=/tmp
# if something goes irreparably wrong, run 'update.sh all clean'
# TODO
# support parallel instances for each pname
# currently risks reusing old data
# no getting around manually checking if the build product works...
# if there is, default to committing?
# won't work when running in parallel?
# remove items left in /nix/store?
# reuse hashes of already checked revs (to avoid redownloading testing's packages3d)
# nixpkgs' update.nix passes in UPDATE_NIX_PNAME to indicate which package is being updated
# assigning a default value to that as shellcheck doesn't like the use of unassigned variables
: "${UPDATE_NIX_PNAME:=""}"
# update.nix can also parse JSON output of this script to formulate a commit
# this requires we collect the version string in the old versions.nix for the updated package
old_version=""
new_version=""
# get the latest tag that isn't an RC or *.99
latest_tags="$(git ls-remote --tags --sort -version:refname https://gitlab.com/kicad/code/kicad.git)"
# using a scratch variable to ensure command failures get caught (SC2312)
scratch="$(grep -o 'refs/tags/[0-9]*\.[0-9]*\.[0-9]*$' <<< "${latest_tags}")"
scratch="$(grep -ve '\.99' -e '\.9\.9' <<< "${scratch}")"
scratch="$(sed -n '1p' <<< "${scratch}")"
latest_tag="$(cut -d '/' -f 3 <<< "${scratch}")"
# get the latest branch name for testing
branches="$(git ls-remote --heads --sort -version:refname https://gitlab.com/kicad/code/kicad.git)"
scratch="$(grep -o 'refs/heads/[0-9]*\.[0-9]*$' <<< "${branches}")"
scratch="$(sed -n '1p' <<< "${scratch}")"
testing_branch="$(cut -d '/' -f 3 <<< "${scratch}")"
# "latest_tag" and "master" directly refer to what we want
# "testing" uses "testing_branch" found above
all_versions=( "${latest_tag}" testing master )
prefetch="nix-prefetch-url --unpack --quiet"
clean=""
check_stable=""
check_testing=1
check_unstable=1
commit=""
for arg in "$@" "${UPDATE_NIX_PNAME}"; do
case "${arg}" in
help|-h|--help) echo "Read me!" >&2; exit 1; ;;
kicad|kicad-small|release|tag|stable|5*|6*|7*|8*) check_stable=1; check_testing=""; check_unstable="" ;;
*testing|kicad-testing-small) check_testing=1; check_unstable="" ;;
*unstable|*unstable-small|master|main) check_unstable=1; check_testing="" ;;
latest|now|today) check_unstable=1; check_testing=1 ;;
all|both|full) check_stable=1; check_testing=1; check_unstable=1 ;;
clean|fix|*fuck) check_stable=1; check_testing=1; check_unstable=1; clean=1 ;;
commit) commit=1 ;;
*) ;;
esac
done
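# example invocations (a sketch; see the case statement above for the full matching):
#   ./update.sh kicad-unstable    # refresh only the master-branch pin
#   ./update.sh all clean         # regenerate every entry from scratch
#   ./update.sh latest commit     # update testing + unstable and commit the result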
here="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
commit_date() {
gitlab_json="$(curl -s https://gitlab.com/api/v4/projects/kicad%2Fcode%2Fkicad/repository/commits/"$1")"
commit_created="$(jq .created_at --raw-output <<< "${gitlab_json}")"
date --date="${commit_created}" --iso-8601 --utc
}
file="${here}/versions.nix"
# just in case this runs in parallel
tmp="${here}/,versions.nix.${RANDOM}"
libs=( symbols templates footprints packages3d )
get_rev() {
git ls-remote "$@"
}
gitlab="https://gitlab.com/kicad"
# append commit hash or tag
src_pre="https://gitlab.com/api/v4/projects/kicad%2Fcode%2Fkicad/repository/archive.tar.gz?sha="
lib_pre="https://gitlab.com/api/v4/projects/kicad%2Flibraries%2Fkicad-"
lib_mid="/repository/archive.tar.gz?sha="
# number of items updated
count=0
printf "Latest tag is %s\n" "${latest_tag}" >&2
if [[ ! -f ${file} ]]; then
echo "No existing file, generating from scratch" >&2
check_stable=1; check_testing=1; check_unstable=1; clean=1
fi
printf "Writing %s\n" "${tmp}" >&2
# not a dangling brace, grouping the output to redirect to file
{
printf "# This file was generated by update.sh\n\n"
printf "{\n"
for version in "${all_versions[@]}"; do
src_version=${version};
lib_version=${version};
# testing is the stable branch on the main repo
# but the libraries don't have such a branch
# only the latest release tag and a master branch
if [[ ${version} == "testing" ]]; then
src_version=${testing_branch};
lib_version=${latest_tag};
fi
if [[ ${version} == "master" ]]; then
pname="kicad-unstable"
elif [[ ${version} == "testing" ]]; then
pname="kicad-testing"
else
pname="kicad"
fi
# skip a version if we don't want to check it
if [[ (-n ${check_stable} && ${version} != "master" && ${version} != "testing") \
|| (-n ${check_testing} && ${version} == "testing") \
|| (-n ${check_unstable} && ${version} == "master" ) ]]; then
now=$(commit_date "${src_version}")
if [[ ${version} == "master" ]]; then
pname="kicad-unstable"
new_version="${now}"
elif [[ ${version} == "testing" ]]; then
pname="kicad-testing"
new_version="${testing_branch}-${now}"
else
pname="kicad"
new_version="${version}"
fi
printf "\nChecking %s\n" "${pname}" >&2
printf "%2s\"%s\" = {\n" "" "${pname}"
printf "%4skicadVersion = {\n" ""
printf "%6sversion =\t\t\t\"%s\";\n" "" "${new_version}"
printf "%6ssrc = {\n" ""
echo "Checking src" >&2
scratch="$(get_rev "${gitlab}"/code/kicad.git "${src_version}")"
src_rev="$(cut -f1 <<< "${scratch}")"
has_rev="$(grep -sm 1 "\"${pname}\"" -A 4 "${file}" | grep -sm 1 "${src_rev}" || true)"
has_hash="$(grep -sm 1 "\"${pname}\"" -A 5 "${file}" | grep -sm 1 "sha256" || true)"
old_version="$(grep -sm 1 "\"${pname}\"" -A 3 "${file}" | grep -sm 1 "version" | awk -F "\"" '{print $2}' || true)"
if [[ -n ${has_rev} && -n ${has_hash} && -z ${clean} ]]; then
echo "Reusing old ${pname}.src.sha256, already latest .rev at ${old_version}" >&2
scratch=$(grep -sm 1 "\"${pname}\"" -A 5 "${file}")
grep -sm 1 "rev" -A 1 <<< "${scratch}"
else
prefetched="$(${prefetch} "${src_pre}${src_rev}")"
printf "%8srev =\t\t\t\"%s\";\n" "" "${src_rev}"
printf "%8ssha256 =\t\t\"%s\";\n" "" "${prefetched}"
count=$((count+1))
fi
printf "%6s};\n" ""
printf "%4s};\n" ""
printf "%4slibVersion = {\n" ""
printf "%6sversion =\t\t\t\"%s\";\n" "" "${new_version}"
printf "%6slibSources = {\n" ""
for lib in "${libs[@]}"; do
echo "Checking ${lib}" >&2
url="${gitlab}/libraries/kicad-${lib}.git"
scratch="$(get_rev "${url}" "${lib_version}")"
scratch="$(cut -f1 <<< "${scratch}")"
lib_rev="$(tail -n1 <<< "${scratch}")"
has_rev="$(grep -sm 1 "\"${pname}\"" -A 19 "${file}" | grep -sm 1 "${lib_rev}" || true)"
has_hash="$(grep -sm 1 "\"${pname}\"" -A 20 "${file}" | grep -sm 1 "${lib}.sha256" || true)"
if [[ -n ${has_rev} && -n ${has_hash} && -z ${clean} ]]; then
echo "Reusing old kicad-${lib}-${new_version}.src.sha256, already latest .rev" >&2
scratch="$(grep -sm 1 "\"${pname}\"" -A 20 "${file}")"
grep -sm 1 "${lib}" -A 1 <<< "${scratch}"
else
prefetched="$(${prefetch} "${lib_pre}${lib}${lib_mid}${lib_rev}")"
printf "%8s%s.rev =\t" "" "${lib}"
case "${lib}" in
symbols|templates) printf "\t" ;; *) ;;
esac
printf "\"%s\";\n" "${lib_rev}"
printf "%8s%s.sha256 =\t\"%s\";\n" "" "${lib}" "${prefetched}"
count=$((count+1))
fi
done
printf "%6s};\n" ""
printf "%4s};\n" ""
printf "%2s};\n" ""
else
printf "\nReusing old %s\n" "${pname}" >&2
grep -sm 1 "\"${pname}\"" -A 21 "${file}"
fi
done
printf "}\n"
} > "${tmp}"
if grep '""' "${tmp}"; then
echo "empty value detected, out of space?" >&2
exit "1"
fi
mv "${tmp}" "${file}"
printf "\nFinished\nMoved output to %s\n\n" "${file}" >&2
if [[ ${count} -gt 0 ]]; then
if [[ ${count} -gt 1 ]]; then s="s"; else s=""; fi
echo "${count} revision${s} changed" >&2
if [[ -n ${commit} ]]; then
git commit -am "$(printf "kicad: automatic update of %s item%s\n" "${count}" "${s}")"
fi
echo "Please confirm the new versions.nix works before making a PR." >&2
else
echo "No changes, those checked are up to date" >&2
fi
# using UPDATE_NIX_ATTR_PATH to detect if this is being called from update.nix
# and output JSON to describe the changes
if [[ -n ${UPDATE_NIX_ATTR_PATH} ]]; then
if [[ ${count} -eq 0 ]]; then echo "[{}]"; exit 0; fi
jq -n \
--arg attrpath "${UPDATE_NIX_PNAME}" \
--arg oldversion "${old_version}" \
--arg newversion "${new_version}" \
--arg file "${file}" \
'[{
"attrPath": $attrpath,
"oldVersion": $oldversion,
"newVersion": $newversion,
"files": [ $file ]
}]'
fi

View File

@@ -1,70 +0,0 @@
# This file was generated by update.sh
{
"kicad" = {
kicadVersion = {
version = "8.0.2";
src = {
rev = "2d5434e9abf570ffd19b22c90963ea71cfb91d3d";
sha256 = "1n1jj7559xd4ib4c6ybya75a5hbarnkfy8gxzxfw58wdb4lxxmzz";
};
};
libVersion = {
version = "8.0.2";
libSources = {
symbols.rev = "099ac0c8ac402a685fde00b1369e34a116e29661";
symbols.sha256 = "0w333f89yw2m0zlpkg0k6hfwlj10snm8laihdjnsb22asyz4pbhn";
templates.rev = "2e2da58e02707d327d59d4101c401a82dc9a26f6";
templates.sha256 = "073a6cyvzzy0vmkj3ip4ziq7b7pcizs70nm5acw838dxghjfyv3v";
footprints.rev = "e8c30550cde4945cbe1bf30cccf0b3c1e2bda6c6";
footprints.sha256 = "10j8qjljc1fv8k4zp3zn0da33g57hn6pgrgmbgp18dsa539xvxcz";
packages3d.rev = "249f7947587529026e1676cd70c8d7493a8d8162";
packages3d.sha256 = "04gvfb54jhnww2qwrxc27wpyrvmjasdc4xhr0ridl7dglh4qcp35";
};
};
};
# "kicad-testing" = {
# kicadVersion = {
# version = "8.0-2024-02-23";
# src = {
# rev = "14d71c8ca6b48d2eb956bb069acf05a37b1b2652";
# sha256 = "0xqd0xbpnvsvba75526nwgzr8l2cfxy99sjmg13sjxfx7rq16kqi";
# };
# };
# libVersion = {
# version = "8.0-2024-02-23";
# libSources = {
# symbols.rev = "e228d4e8b295364e90e36c57f4023d8285ba88cd";
# symbols.sha256 = "049h2a7yn6ks8sybppixa872dbvyd0rwf9r6nixvdg6d13fl6rwf";
# templates.rev = "2e00c233b67e35323f90d04c190bf70237a252f2";
# templates.sha256 = "0m9bggz3cm27kqpjjwxy19mqzk0c69bywcjkqcni7kafr21c6k4z";
# footprints.rev = "6e5329a6d4aaa81290e23af3eba88f505c2f61b0";
# footprints.sha256 = "0ypjlbmzmcl3pha3q2361va70c988b1drxy8320gm66jkzfc21a1";
# packages3d.rev = "d1e521228d9f5888836b1a6a35fb05fb925456fa";
# packages3d.sha256 = "0lcy1av7ixg1f7arflk50jllpc1749sfvf3h62hkxsz97wkr97xj";
# };
# };
# };
# "kicad-unstable" = {
# kicadVersion = {
# version = "2024-02-23";
# src = {
# rev = "b7b64d959f37f00bb0d14b007c3b3908196e1024";
# sha256 = "1gl7mjqpmqq4m55z6crwb77983g00gi2161ichsc7hsfhs4c8grh";
# };
# };
# libVersion = {
# version = "2024-02-23";
# libSources = {
# symbols.rev = "8b0c343d8694fe0a968e5c4af69fd161bacf7da1";
# symbols.sha256 = "049h2a7yn6ks8sybppixa872dbvyd0rwf9r6nixvdg6d13fl6rwf";
# templates.rev = "0a6c4f798a68a5c639d54b4d3093460ab9267816";
# templates.sha256 = "0m9bggz3cm27kqpjjwxy19mqzk0c69bywcjkqcni7kafr21c6k4z";
# footprints.rev = "ded6b053460faae5783c538a38e91e2b4bddcf2e";
# footprints.sha256 = "035bf37n4vrihaj4zfdncisdx9fly1vya7lhkxhlsbv5blpi4a5y";
# packages3d.rev = "984667325076d4e50dab14e755aeacf97f42194c";
# packages3d.sha256 = "0lkaxv02h4sxrnm8zr17wl9d07mazlisad78r35gry741i362cdg";
# };
# };
# };
}

View File

@@ -1,49 +0,0 @@
commit 6a72fd032405515e468797be91b5a6ebcbbb5fd8
Author: Evils <evils.devils@protonmail.com>
Date: Wed Nov 23 19:49:13 2022 +0100
ensure new projects are writable
diff --git a/kicad/kicad_manager_frame.cpp b/kicad/kicad_manager_frame.cpp
index 7ee8090858..391514519c 100644
--- a/kicad/kicad_manager_frame.cpp
+++ b/kicad/kicad_manager_frame.cpp
@@ -638,6 +638,12 @@ void KICAD_MANAGER_FRAME::CreateNewProject( const wxFileName& aProjectFileName,
// wxFFile dtor will close the file
}
+
+ if( destFileName.IsOk() && !destFileName.IsFileWritable() )
+ {
+ destFileName.SetPermissions(0644);
+ }
+
}
}
diff --git a/kicad/project_template.cpp b/kicad/project_template.cpp
index bf951fcddb..2bef94326b 100644
--- a/kicad/project_template.cpp
+++ b/kicad/project_template.cpp
@@ -282,6 +282,21 @@ bool PROJECT_TEMPLATE::CreateProject( wxFileName& aNewProjectPath, wxString* aEr
result = false;
}
+ else if( !destFile.IsFileWritable() && !destFile.SetPermissions(0644) )
+ {
+ if( aErrorMsg )
+ {
+ if( !aErrorMsg->empty() )
+ *aErrorMsg += "\n";
+
+ wxString msg;
+
+ msg.Printf( _( "Cannot make file writable: '%s'." ), destFile.GetFullPath() );
+ *aErrorMsg += msg;
+ }
+
+ result = false;
+ }
}
return result;

View File

@@ -1,23 +0,0 @@
{
fetchgit,
buildDunePackage,
ppxlib,
uunf
}:
buildDunePackage rec {
pname = "ppx_unicode";
version = "0.1.0";
src = fetchgit {
url = "https://git.lain.faith/haskal/ppx_unicode.git";
rev = version;
hash = "sha256-WUrVW/JndDoMLPx5VSQmlcfafxPxwQe2l7CuTnxtV7Q=";
};
minimalOcamlVersion = "5.1";
dontStrip = true;
nativeBuildInputs = [ ppxlib ];
propagatedBuildInputs = [ ppxlib uunf ];
}

View File

@@ -1,25 +0,0 @@
{
fetchgit,
buildDunePackage,
ptime,
ppxlib,
ppx_unicode
}:
buildDunePackage rec {
pname = "xlog";
version = "0.0.2";
src = fetchgit {
url = "https://git.lain.faith/haskal/xlog.git";
rev = version;
hash = "sha256-Su1Udp7ICrMuD7D8TtyuNnfwTBK5ZefFk89miqwrxDc=";
};
minimalOcamlVersion = "5.1";
dontStrip = true;
buildInputs = [ ppx_unicode ];
propagatedBuildInputs = [ ptime ppxlib ];
nativeBuildInputs = [ ppxlib ppx_unicode ];
}

View File

@@ -1,25 +0,0 @@
import argparse
import sys
from . import compatibility, validateStream
from .formatter.text_plain import Formatter
def main():
parser = argparse.ArgumentParser(description="W3C feedvalidator")
parser.add_argument("-b", "--base", type=str, required=False, default="",
help="Base URL of document")
parser.add_argument("file", type=argparse.FileType("rb"), help="File to validate")
args = parser.parse_args()
events = validateStream(args.file, base=args.base)['loggedEvents']
events = compatibility.AA(events)
fmt = Formatter(events)
if len(fmt) > 0:
for f in fmt:
print(f)
sys.exit(1)
if __name__ == "__main__":
main()

View File

@@ -1,61 +0,0 @@
{
lib,
fetchFromGitHub,
stdenvNoCC,
buildPythonPackage,
setuptools,
wheel,
lxml,
html5lib,
rdflib
}:
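# the upstream source isn't consumed as-is: a wrapper derivation first adds a
# generated pyproject.toml and the CLI entry point (bin.py) so that
# buildPythonPackage below can build and install it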
let feedvalidator_src = stdenvNoCC.mkDerivation {
name = "feedvalidator-src";
src = fetchFromGitHub {
owner = "w3c";
repo = "feedvalidator";
rev = "1bbf6d9c68ef074b824c452fbc5d1f7817e6adae";
sha256 = "sha256-sHc6cgjSNcd0BcYYeybGPayQNV8SK9GjUglWg9iOQko=";
};
installPhase = ''
mkdir -p "$out"
cp -r src/feedvalidator/ "$out"
cp "${./bin.py}" "$out/feedvalidator/bin.py"
cp requirements.txt "$out"
cat > "$out"/pyproject.toml <<EOF
[build-system]
requires = ["setuptools>=60", "wheel"]
build-backend = "setuptools.build_meta"
[project]
name = "feedvalidator"
version = "0.0.1+git"
requires-python = ">=3.11"
dynamic = ["dependencies"]
[project.scripts]
feedvalidator = "feedvalidator.bin:main"
[tool.setuptools.dynamic]
dependencies = {file = ["requirements.txt"]}
EOF
'';
};
in buildPythonPackage rec {
pname = "feedvalidator";
version = "git";
pyproject = true;
src = feedvalidator_src;
nativeBuildInputs = [ setuptools wheel ];
# this should match requirements.txt
propagatedBuildInputs = [ lxml html5lib rdflib ];
doCheck = false;
pythonImportsCheck = [ "feedvalidator" ];
}