uboot: (firmwareOdroidC2/C4) don't invoke patch tool, use patches = [] instead

https://github.com/NixOS/nixpkgs/blob/master/pkgs/stdenv/generic/setup.sh#L948
this can do it nicely.

Signed-off-by: Anton Arapov <anton@deadbeef.mx>
This commit is contained in:
Anton Arapov 2021-04-03 12:58:10 +02:00 committed by Alan Daniels
commit 56de2bcd43
30691 changed files with 3076956 additions and 0 deletions

View file

@ -0,0 +1,23 @@
# Per-platform prebuilt AdGuardHome release binaries.
# Regenerated by ./update.sh; hashes are pinned per release.
{ fetchurl, fetchzip }:
let
  version = "0.107.7";
  baseUrl = "https://github.com/AdguardTeam/AdGuardHome/releases/download/v${version}";
in
{
  # Darwin assets ship as zip archives, so fetchzip unpacks them.
  x86_64-darwin = fetchzip {
    url = "${baseUrl}/AdGuardHome_darwin_amd64.zip";
    sha256 = "sha256-z7QFnx414sdGazUZIenAVA+7LcSZT0tTb/ldv1SGV3Q=";
  };
  aarch64-darwin = fetchzip {
    url = "${baseUrl}/AdGuardHome_darwin_arm64.zip";
    sha256 = "sha256-Pbl7YaDVxdER/ubOiPq54ASB4ILnH0B3GiQlQBe7gFs=";
  };
  # Linux assets are tarballs fetched as-is.
  i686-linux = fetchurl {
    url = "${baseUrl}/AdGuardHome_linux_386.tar.gz";
    sha256 = "sha256-P2PsSdpW5i2hwBPUKb+viYewlVHTER/eBkwPp3koawo=";
  };
  x86_64-linux = fetchurl {
    url = "${baseUrl}/AdGuardHome_linux_amd64.tar.gz";
    sha256 = "sha256-cbTlVBlGdFgEz2b6pb0SJ7yUf4wFXnZwLCkmvX75FzU=";
  };
  aarch64-linux = fetchurl {
    url = "${baseUrl}/AdGuardHome_linux_arm64.tar.gz";
    sha256 = "sha256-TKZ3bOM5oq30GtLn9ifNyY6+2Li4nf1+r2L0ExG/10c=";
  };
}

View file

@ -0,0 +1,29 @@
# AdGuardHome: packaged from upstream's prebuilt release binaries.
{ lib, stdenv, fetchurl, fetchzip, nixosTests }:
let
# Nix system string of the platform we are building for.
inherit (stdenv.hostPlatform) system;
# Per-platform pinned release binaries, regenerated by ./update.sh.
sources = import ./bins.nix { inherit fetchurl fetchzip; };
in
stdenv.mkDerivation rec {
pname = "adguardhome";
version = "0.107.7";
# Fail at evaluation time with a clear message on unsupported platforms.
src = sources.${system} or (throw "Source for ${pname} is not available for ${system}");
# Install the prebuilt AdGuardHome binary; nothing is compiled here.
installPhase = ''
install -m755 -D ./AdGuardHome $out/bin/adguardhome
'';
passthru = {
updateScript = ./update.sh;
tests.adguardhome = nixosTests.adguardhome;
};
meta = with lib; {
homepage = "https://github.com/AdguardTeam/AdGuardHome";
description = "Network-wide ads & trackers blocking DNS server";
# Supported platforms are exactly those with a pinned binary in bins.nix.
platforms = builtins.attrNames sources;
maintainers = with maintainers; [ numkem iagoq ];
license = licenses.gpl3Only;
};
}

View file

@ -0,0 +1,39 @@
#! /usr/bin/env nix-shell
#! nix-shell -i bash -p curl gnugrep nix-prefetch jq
# This file is based on /pkgs/servers/gotify/update.sh
#
# Regenerates bins.nix from the latest upstream GitHub release and bumps
# the version in default.nix.
set -euo pipefail
dirname="$(dirname "$0")"
bins="$dirname/bins.nix"

# Fetch the release metadata once and reuse it below (the original hit the
# GitHub API a second time in the asset loop).
latest_release=$(curl --silent https://api.github.com/repos/AdguardTeam/AdGuardHome/releases/latest)
version=$(jq -r '.tag_name' <<<"$latest_release")
echo "got version $version"

# Map AdGuardHome asset name suffixes to Nix system strings.
declare -A systems
systems[linux_386]=i686-linux
systems[linux_amd64]=x86_64-linux
systems[linux_arm64]=aarch64-linux
systems[darwin_amd64]=x86_64-darwin
systems[darwin_arm64]=aarch64-darwin

echo '{ fetchurl, fetchzip }:' > "$bins"
echo '{' >> "$bins"
for asset in $(jq -c '.assets[]' <<<"$latest_release") ; do
  url="$(jq -r '.browser_download_url' <<< "$asset")"
  # Skip assets for platforms we do not package.
  adg_system="$(grep -Eo '(darwin|linux)_(386|amd64|arm64)' <<< "$url" || true)"
  if [ -n "$adg_system" ]; then
    # Zip archives need fetchzip (unpacked); tarballs use plain fetchurl.
    fetch="$(grep '\.zip$' <<< "$url" > /dev/null && echo fetchzip || echo fetchurl)"
    nix_system=${systems[$adg_system]}
    nix_src="$(nix-prefetch -s --output nix "$fetch" --url "$url")"
    echo "$nix_system = $fetch $nix_src;" >> "$bins"
  fi
done
echo '}' >> "$bins"

# Bump the version in default.nix, stripping the leading "v" from the tag.
sed -i -r -e "s/version\s*?=\s*?.*?;/version = \"${version#v}\";/" "$dirname/default.nix"

View file

@ -0,0 +1,51 @@
# Adminer: database management UI shipped as a single compiled PHP file.
{ lib, stdenv, fetchurl, php, nix-update-script }:
stdenv.mkDerivation rec {
version = "4.8.1";
pname = "adminer";
# not using fetchFromGitHub as the git repo relies on submodules that are included in the tar file
src = fetchurl {
url = "https://github.com/vrana/adminer/releases/download/v${version}/adminer-${version}.tar.gz";
sha256 = "sha256-2rkNq79sc5RBFxWuiaSlpWr0rwrnEFlnW1WcoxjoP2M=";
};
# composer drives the "compile" step that bundles everything into one file.
nativeBuildInputs = [
php
php.packages.composer
];
buildPhase = ''
runHook preBuild
# NOTE(review): --no-cache presumably keeps composer from touching a cache
# directory outside the sandbox — confirm if the build starts failing.
composer --no-cache run compile
runHook postBuild
'';
installPhase = ''
runHook preInstall
mkdir $out
# The compiled artifact is versioned; install it under a stable name.
cp adminer-${version}.php $out/adminer.php
runHook postInstall
'';
passthru = {
updateScript = nix-update-script {
attrPath = pname;
};
};
meta = with lib; {
description = "Database management in a single PHP file";
homepage = "https://www.adminer.org";
license = with licenses; [ asl20 gpl2Only ];
maintainers = with maintainers; [
jtojnar
sstef
];
platforms = platforms.all;
};
}

View file

@ -0,0 +1,24 @@
# alps: a webmail frontend, built from a pinned sourcehut commit.
{ lib, buildGoModule, fetchFromSourcehut }:
buildGoModule rec {
  pname = "alps";
  version = "2022-06-03";
  src = fetchFromSourcehut {
    owner = "~migadu";
    repo = "alps";
    # Upstream has no tagged releases; pin the commit from 2022-06-03.
    rev = "9cb23b09975e95f6a5952e3718eaf471c3e3510f";
    hash = "sha256-BUV1/BRIXHEf2FU1rdmNgueo8KSUlMKbIpAg2lFs3hA=";
  };
  vendorSha256 = "sha256-cpY+lYM/nAX3nUaFknrRAavxDk8UDzJkoqFjJ1/KWeg=";
  proxyVendor = true;
  meta = with lib; {
    # nixpkgs convention: no leading article, no trailing period.
    description = "Simple and extensible webmail";
    homepage = "https://git.sr.ht/~migadu/alps";
    license = licenses.mit;
    maintainers = with maintainers; [ gordias booklearner ];
  };
}

View file

@ -0,0 +1,26 @@
# althttpd: the small web server that serves sqlite.org.
{ lib, stdenv, fetchfossil, openssl }:
stdenv.mkDerivation rec {
pname = "althttpd";
version = "unstable-2022-01-10";
# Upstream is a fossil repository; rev is a fossil check-in id.
src = fetchfossil {
url = "https://sqlite.org/althttpd/";
rev = "83196564d05f33c3";
sha256 = "sha256-z/XMVnDihcO56kJaXIJGUUdnz8mR5jlySrLZX1tkV5c=";
};
buildInputs = [ openssl ];
# Install the single althttpd binary produced by the default build.
installPhase = ''
install -Dm755 -t $out/bin althttpd
'';
meta = with lib; {
description = "The Althttpd webserver";
homepage = "https://sqlite.org/althttpd/";
license = licenses.publicDomain;
maintainers = with maintainers; [ siraben ];
platforms = platforms.all;
};
}

View file

@ -0,0 +1,52 @@
# qpid-cpp: Apache Qpid AMQP broker and C++ messaging API.
{ lib, stdenv
, fetchpatch
, fetchurl
, boost
, cmake
, libuuid
, python3
, ruby
}:
stdenv.mkDerivation rec {
pname = "qpid-cpp";
version = "1.39.0";
src = fetchurl {
url = "mirror://apache/qpid/cpp/${version}/${pname}-${version}.tar.gz";
hash = "sha256-eYDQ6iHVV1WUFFdyHGnbqGIjE9CrhHzh0jP7amjoDSE=";
};
nativeBuildInputs = [ cmake python3 ];
buildInputs = [ boost libuuid ruby ];
patches = [
# Upstream fix porting the managementgen tooling to python3.
(fetchpatch {
name = "python3-managementgen";
url = "https://github.com/apache/qpid-cpp/commit/0e558866e90ef3d5becbd2f6d5630a6a6dc43a5d.patch";
hash = "sha256-pV6xx8Nrys/ZxIO0Z/fARH0ELqcSdTXLPsVXYUd3f70=";
})
];
# the subdir managementgen wants to install python stuff in ${python} and
# the installation tries to create some folders in /var
postPatch = ''
sed -i '/managementgen/d' CMakeLists.txt
sed -i '/ENV/d' src/CMakeLists.txt
sed -i '/management/d' CMakeLists.txt
'';
# Silence warnings-as-errors that trip the build with current compilers.
NIX_CFLAGS_COMPILE = toString ([
"-Wno-error=maybe-uninitialized"
] ++ lib.optionals stdenv.cc.isGNU [
"-Wno-error=deprecated-copy"
]);
meta = with lib; {
homepage = "https://qpid.apache.org";
description = "An AMQP message broker and a C++ messaging API";
license = licenses.asl20;
platforms = platforms.linux;
maintainers = with maintainers; [ cpages ];
};
}

View file

@ -0,0 +1,88 @@
# RabbitMQ broker, built from the upstream source release tarball.
{ lib
, stdenv
, fetchurl
, erlang
, elixir
, python3
, libxml2
, libxslt
, xmlto
, docbook_xml_dtd_45
, docbook_xsl
, zip
, unzip
, rsync
, getconf
, socat
, procps
, coreutils
, gnused
, systemd
, glibcLocales
, AppKit
, Carbon
, Cocoa
, nixosTests
}:
stdenv.mkDerivation rec {
pname = "rabbitmq-server";
version = "3.9.14";
# when updating, consider bumping elixir version in all-packages.nix
src = fetchurl {
url = "https://github.com/rabbitmq/rabbitmq-server/releases/download/v${version}/${pname}-${version}.tar.xz";
sha256 = "sha256-c6GpB6CSCHiU9hTC9FkxyTc1UpNWxx5iP3y2dbTUfS0=";
};
nativeBuildInputs = [ unzip xmlto docbook_xml_dtd_45 docbook_xsl zip rsync python3 ];
buildInputs = [ erlang elixir libxml2 libxslt glibcLocales ]
++ lib.optionals stdenv.isDarwin [ AppKit Carbon Cocoa ];
outputs = [ "out" "man" "doc" ];
installFlags = [ "PREFIX=$(out)" "RMQ_ERLAPP_DIR=$(out)" ];
installTargets = [ "install" "install-man" ];
preBuild = ''
export LANG=C.UTF-8 # fix elixir locale warning
'';
# Tools rabbitmq-env must find at runtime; prefixed onto PATH below.
runtimePath = lib.makeBinPath ([
erlang
getconf # for getting memory limits
socat
procps
gnused
coreutils # used by helper scripts
] ++ lib.optionals stdenv.isLinux [ systemd ]); # for systemd unit activation check
postInstall = ''
# rabbitmq-env calls to sed/coreutils, so provide everything early
sed -i $out/sbin/rabbitmq-env -e '2s|^|PATH=${runtimePath}\''${PATH:+:}\$PATH/\n|'
# We know exactly where rabbitmq is gonna be, so we patch that into the env-script.
# By doing it early we make sure that auto-detection for this will
# never be executed (somewhere below in the script).
sed -i $out/sbin/rabbitmq-env -e "2s|^|RABBITMQ_SCRIPTS_DIR=$out/sbin\n|"
# there are a few stray files that belong into share
mkdir -p $doc/share/doc/rabbitmq-server
mv $out/LICENSE* $doc/share/doc/rabbitmq-server
# and an unnecessarily copied INSTALL file
rm $out/INSTALL
'';
passthru.tests = {
vm-test = nixosTests.rabbitmq;
};
meta = with lib; {
homepage = "https://www.rabbitmq.com/";
description = "An implementation of the AMQP messaging protocol";
license = licenses.mpl20;
platforms = platforms.unix;
maintainers = with maintainers; [ turion ];
};
}

View file

@ -0,0 +1,67 @@
# ankisyncd: self-hosted sync server for Anki.
{ lib
, fetchFromGitHub
, python3
, anki
}:
python3.pkgs.buildPythonApplication rec {
  pname = "ankisyncd";
  version = "2.2.0";
  src = fetchFromGitHub {
    owner = "ankicommunity";
    repo = "anki-sync-server";
    rev = version;
    sha256 = "196xhd6vzp1ncr3ahz0bv0gp1ap2s37j8v48dwmvaywzayakqdab";
  };
  # Upstream ships no setup.py/pyproject, so the sources are installed by hand.
  format = "other";
  installPhase = ''
    runHook preInstall
    mkdir -p $out/${python3.sitePackages}
    cp -r ankisyncd utils ankisyncd.conf $out/${python3.sitePackages}
    mkdir $out/share
    cp ankisyncctl.py $out/share/
    runHook postInstall
  '';
  # NOTE(review): overriding fixupPhase wholesale skips the default fixup
  # steps; the runHook calls at least keep pre/postFixup additions working.
  fixupPhase = ''
    runHook preFixup
    PYTHONPATH="$PYTHONPATH:$out/${python3.sitePackages}:${anki}"
    makeWrapper "${python3.interpreter}" "$out/bin/ankisyncd" \
      --set PYTHONPATH $PYTHONPATH \
      --add-flags "-m ankisyncd"
    makeWrapper "${python3.interpreter}" "$out/bin/ankisyncctl" \
      --set PYTHONPATH $PYTHONPATH \
      --add-flags "$out/share/ankisyncctl.py"
    runHook postFixup
  '';
  checkInputs = with python3.pkgs; [
    pytest
    webtest
  ];
  propagatedBuildInputs = [ anki ];
  checkPhase = ''
    runHook preCheck
    # Exclude tests that require sqlite's sqldiff command, since
    # it isn't yet packaged for NixOS, although 2 PRs exist:
    # - https://github.com/NixOS/nixpkgs/pull/69112
    # - https://github.com/NixOS/nixpkgs/pull/75784
    # Once this is merged, these tests can be run as well.
    pytest --ignore tests/test_web_media.py tests/
    runHook postCheck
  '';
  meta = with lib; {
    description = "Self-hosted Anki sync server";
    maintainers = with maintainers; [ matt-snider ];
    homepage = "https://github.com/ankicommunity/anki-sync-server";
    license = licenses.agpl3Only;
    platforms = platforms.linux;
  };
}

View file

@ -0,0 +1,69 @@
# Apache Kafka, selectable by major version via `majorVersion`.
{ lib, stdenv, fetchurl, jdk8_headless, jdk11_headless, makeWrapper, bash, coreutils, gnugrep, gnused, ps,
majorVersion ? "1.0" }:
let
jre8 = jdk8_headless;
jre11 = jdk11_headless;
# Pinned release per major series: exact kafka/scala version, hash and JRE.
versionMap = {
"2.7" = {
kafkaVersion = "2.7.1";
scalaVersion = "2.13";
sha256 = "1qv6blf99211bc80xnd4k42r9v9c5vilyqkplyhsa6hqymg32gfa";
jre = jre11;
};
"2.8" = {
kafkaVersion = "2.8.1";
scalaVersion = "2.13";
sha256 = "0fgil47hxdnc374k0p9sxv6b163xknp3pkihv3r99p977czb1228";
jre = jre11;
};
};
in
# Brings kafkaVersion, scalaVersion, sha256 and jre into scope below.
with versionMap.${majorVersion};
stdenv.mkDerivation rec {
version = "${scalaVersion}-${kafkaVersion}";
pname = "apache-kafka";
src = fetchurl {
url = "mirror://apache/kafka/${kafkaVersion}/kafka_${version}.tgz";
inherit sha256;
};
nativeBuildInputs = [ makeWrapper ];
buildInputs = [ jre bash gnugrep gnused coreutils ps ];
installPhase = ''
mkdir -p $out
cp -R config libs $out
mkdir -p $out/bin
cp bin/kafka* $out/bin
cp bin/connect* $out/bin
# allow us the specify logging directory using env
substituteInPlace $out/bin/kafka-run-class.sh \
--replace 'LOG_DIR="$base_dir/logs"' 'LOG_DIR="$KAFKA_LOG_DIR"'
substituteInPlace $out/bin/kafka-server-stop.sh \
--replace 'ps' '${ps}/bin/ps'
# Wrap every shell script with a pinned JAVA_HOME and runtime PATH.
for p in $out/bin\/*.sh; do
wrapProgram $p \
--set JAVA_HOME "${jre}" \
--set KAFKA_LOG_DIR "/tmp/apache-kafka-logs" \
--prefix PATH : "${bash}/bin:${coreutils}/bin:${gnugrep}/bin:${gnused}/bin"
done
chmod +x $out/bin\/*
'';
meta = with lib; {
homepage = "https://kafka.apache.org";
description = "A high-throughput distributed messaging system";
license = licenses.asl20;
maintainers = [ maintainers.ragge ];
platforms = platforms.unix;
};
# Expose the JRE actually used, e.g. for the NixOS module.
passthru = { inherit jre; };
}

View file

@ -0,0 +1,62 @@
# apcupsd: daemon for APC UPS devices; optional CGI status scripts via gd.
{ lib, stdenv, fetchurl, pkg-config, systemd, util-linux, coreutils, wall, hostname, man
, enableCgiScripts ? true, gd
}:
assert enableCgiScripts -> gd != null;
stdenv.mkDerivation rec {
pname = "apcupsd";
version = "3.14.14";
src = fetchurl {
url = "mirror://sourceforge/${pname}/${pname}-${version}.tar.gz";
sha256 = "0rwqiyzlg9p0szf3x6q1ppvrw6f6dbpn2rc5z623fk3bkdalhxyv";
};
nativeBuildInputs = [ pkg-config ];
buildInputs = [ util-linux man ] ++ lib.optional enableCgiScripts gd;
# Drop $(STRIP) from install rules so binaries keep working with our strip setup.
prePatch = ''
sed -e "s,\$(INSTALL_PROGRAM) \$(STRIP),\$(INSTALL_PROGRAM)," \
-i ./src/apcagent/Makefile ./autoconf/targets.mak
'';
# ./configure ignores --prefix, so we must specify some paths manually
# There is no real reason for a bin/sbin split, so just use bin.
preConfigure = ''
export ac_cv_path_SHUTDOWN=${systemd}/sbin/shutdown
export ac_cv_path_WALL=${wall}/bin/wall
sed -i 's|/bin/cat|${coreutils}/bin/cat|' configure
export configureFlags="\
--bindir=$out/bin \
--sbindir=$out/bin \
--sysconfdir=$out/etc/apcupsd \
--mandir=$out/share/man \
--with-halpolicydir=$out/share/halpolicy \
--localstatedir=/var/ \
--with-nologin=/run \
--with-log-dir=/var/log/apcupsd \
--with-pwrfail-dir=/run/apcupsd \
--with-lock-dir=/run/lock \
--with-pid-dir=/run \
--enable-usb \
${lib.optionalString enableCgiScripts "--enable-cgi --with-cgi-bin=$out/libexec/cgi-bin"}
"
'';
# Point the installed helper scripts at store paths for wall/hostname.
postInstall = ''
for file in "$out"/etc/apcupsd/*; do
sed -i -e 's|^WALL=.*|WALL="${wall}/bin/wall"|g' \
-e 's|^HOSTNAME=.*|HOSTNAME=`${hostname}/bin/hostname`|g' \
"$file"
done
'';
meta = with lib; {
description = "Daemon for controlling APC UPSes";
homepage = "http://www.apcupsd.com/";
license = licenses.gpl2;
platforms = platforms.linux;
maintainers = [ maintainers.bjornfor ];
};
}

View file

@ -0,0 +1,145 @@
# Asterisk PBX: builds several supported release series from a shared recipe.
{ stdenv, lib, fetchurl, fetchsvn, fetchFromGitHub,
jansson, libedit, libxml2, libxslt, ncurses, openssl, sqlite,
util-linux, dmidecode, libuuid, newt,
lua, speex, libopus, opusfile, libogg,
srtp, wget, curl, iksemel, pkg-config,
autoconf, libtool, automake,
python39, writeScript,
withOpus ? true,
}:
let
# Builder shared by every packaged Asterisk version; `externals` maps
# destination paths to pre-fetched sources copied in before configure.
common = {version, sha256, externals}: stdenv.mkDerivation {
inherit version;
pname = "asterisk";
buildInputs = [ jansson libedit libxml2 libxslt ncurses openssl sqlite
dmidecode libuuid newt
lua speex
srtp wget curl iksemel ]
++ lib.optionals withOpus [ libopus opusfile libogg ];
nativeBuildInputs = [ util-linux pkg-config autoconf libtool automake ];
patches = [
# We want the Makefile to install the default /var skeleton
# under ${out}/var but we also want to use /var at runtime.
# This patch changes the runtime behavior to look for state
# directories in /var rather than ${out}/var.
./runtime-vardirs.patch
] ++ lib.optional withOpus "${asterisk-opus}/asterisk.patch";
# Make the bundled pjproject install under our prefix.
postPatch = ''
echo "PJPROJECT_CONFIG_OPTS += --prefix=$out" >> third-party/pjproject/Makefile.rules
'';
src = fetchurl {
url = "https://downloads.asterisk.org/pub/telephony/asterisk/old-releases/asterisk-${version}.tar.gz";
inherit sha256;
};
# The default libdir is $PREFIX/usr/lib, which causes problems when paths
# compiled into Asterisk expect ${out}/usr/lib rather than ${out}/lib.
# Copy in externals to avoid them being downloaded;
# they have to be copied, because the modification date is checked.
# If you are getting a permission denied error on this dir,
# you're likely missing an automatically downloaded dependency
preConfigure = ''
mkdir externals_cache
${lib.concatStringsSep "\n"
(lib.mapAttrsToList (dst: src: "cp -r --no-preserve=mode ${src} ${dst}") externals)}
${lib.optionalString (externals ? "addons/mp3") "bash contrib/scripts/get_mp3_source.sh || true"}
chmod -w externals_cache
${lib.optionalString withOpus ''
cp ${asterisk-opus}/include/asterisk/* ./include/asterisk
cp ${asterisk-opus}/codecs/* ./codecs
cp ${asterisk-opus}/formats/* ./formats
''}
./bootstrap.sh
'';
configureFlags = [
"--libdir=\${out}/lib"
"--with-lua=${lua}/lib"
"--with-pjproject-bundled"
"--with-externals-cache=$(PWD)/externals_cache"
];
# menuselect.makeopts lists disabled modules; removing an entry enables it.
preBuild = ''
make menuselect.makeopts
${lib.optionalString (externals ? "addons/mp3") ''
substituteInPlace menuselect.makeopts --replace 'format_mp3 ' ""
''}
${lib.optionalString withOpus ''
substituteInPlace menuselect.makeopts --replace 'codec_opus_open_source ' ""
substituteInPlace menuselect.makeopts --replace 'format_ogg_opus_open_source ' ""
''}
'';
postInstall = ''
# Install sample configuration files for this version of Asterisk
make samples
${lib.optionalString (lib.versionAtLeast version "17.0.0") "make install-headers"}
'';
meta = with lib; {
description = "Software implementation of a telephone private branch exchange (PBX)";
homepage = "https://www.asterisk.org/";
license = licenses.gpl2Only;
maintainers = with maintainers; [ auntie DerTim1 yorickvp ];
};
};
# Bundled pjproject tarball pinned to the version Asterisk expects.
pjproject_2_12 = fetchurl {
url = "https://raw.githubusercontent.com/asterisk/third-party/master/pjproject/2.12/pjproject-2.12.tar.bz2";
hash = "sha256-T3q4r/4WCAZCNGnULxMnNKH9wEK7gkseV/sV8IPasHQ=";
};
# MP3 addon sources (svn), consumed via the addons/mp3 external.
mp3-202 = fetchsvn {
url = "http://svn.digium.com/svn/thirdparty/mp3/trunk";
rev = "202";
sha256 = "1s9idx2miwk178sa731ig9r4fzx4gy1q8xazfqyd7q4lfd70s1cy";
};
# Out-of-tree Opus codec/format support, applied when withOpus = true.
asterisk-opus = fetchFromGitHub {
owner = "traud";
repo = "asterisk-opus";
# No releases, points to master as of 2022-04-06
rev = "a959f072d3f364be983dd27e6e250b038aaef747";
sha256 = "sha256-CASlTvTahOg9D5jccF/IN10LP/U8rRy9BFCSaHGQfCw=";
};
# auto-generated by update.py
versions = lib.mapAttrs (_: {version, sha256}: common {
inherit version sha256;
externals = {
"externals_cache/pjproject-2.12.tar.bz2" = pjproject_2_12;
"addons/mp3" = mp3-202;
};
}) (lib.importJSON ./versions.json);
updateScript_python = python39.withPackages (p: with p; [ packaging beautifulsoup4 requests ]);
updateScript = writeScript "asterisk-update" ''
#!/usr/bin/env bash
exec ${updateScript_python}/bin/python ${toString ./update.py}
'';
in {
# Supported releases (as of 2022-04-05).
# Source: https://wiki.asterisk.org/wiki/display/AST/Asterisk+Versions
# Exact version can be found at https://www.asterisk.org/downloads/asterisk/all-asterisk-versions/
#
# Series Type Rel. Date Sec. Fixes EOL
# 16.x LTS 2018-10-09 2022-10-09 2023-10-09
# 18.x LTS 2020-10-20 2024-10-20 2025-10-20
# 19.x Standard 2021-11-02 2022-11-02 2023-11-02
asterisk-lts = versions.asterisk_18;
asterisk-stable = versions.asterisk_19;
asterisk = versions.asterisk_19.overrideAttrs (o: {
passthru = (o.passthru or {}) // { inherit updateScript; };
});
} // versions

View file

@ -0,0 +1,50 @@
diff -rupN asterisk-14.1.2/build_tools/make_defaults_h asterisk-14.1.2-patched/build_tools/make_defaults_h
--- asterisk-14.1.2/build_tools/make_defaults_h 2016-11-10 20:43:02.000000000 +0100
+++ asterisk-14.1.2-patched/build_tools/make_defaults_h 2016-11-16 10:09:04.189625495 +0100
@@ -1,4 +1,13 @@
#!/bin/sh
+
+ASTLOGDIR=/var/log/asterisk
+ASTVARRUNDIR=/run/asterisk
+ASTVARLIBDIR=/var/lib/asterisk
+ASTDBDIR=${ASTVARLIBDIR}
+ASTDATADIR=${ASTVARLIBDIR}
+AGI_DIR=${ASTDATADIR}/agi-bin
+ASTSPOOLDIR=/var/spool/asterisk
+
cat << END
/*
* defaults.h
@@ -9,21 +18,21 @@ cat << END
#define DEFAULT_CONFIG_DIR "${INSTALL_PATH}${ASTETCDIR}"
#define DEFAULT_MODULE_DIR "${INSTALL_PATH}${ASTMODDIR}"
-#define DEFAULT_AGI_DIR "${INSTALL_PATH}${AGI_DIR}"
-#define DEFAULT_LOG_DIR "${INSTALL_PATH}${ASTLOGDIR}"
+#define DEFAULT_AGI_DIR "${AGI_DIR}"
+#define DEFAULT_LOG_DIR "${ASTLOGDIR}"
-#define DEFAULT_RUN_DIR "${INSTALL_PATH}${ASTVARRUNDIR}"
-#define DEFAULT_SOCKET "${INSTALL_PATH}${ASTVARRUNDIR}/asterisk.ctl"
-#define DEFAULT_PID "${INSTALL_PATH}${ASTVARRUNDIR}/asterisk.pid"
+#define DEFAULT_RUN_DIR "${ASTVARRUNDIR}"
+#define DEFAULT_SOCKET "${ASTVARRUNDIR}/asterisk.ctl"
+#define DEFAULT_PID "${ASTVARRUNDIR}/asterisk.pid"
-#define DEFAULT_VAR_DIR "${INSTALL_PATH}${ASTVARLIBDIR}"
-#define DEFAULT_DB "${INSTALL_PATH}${ASTDBDIR}/astdb"
+#define DEFAULT_VAR_DIR "${ASTVARLIBDIR}"
+#define DEFAULT_DB "${ASTDBDIR}/astdb"
-#define DEFAULT_DATA_DIR "${INSTALL_PATH}${ASTDATADIR}"
-#define DEFAULT_KEY_DIR "${INSTALL_PATH}${ASTDATADIR}/keys"
+#define DEFAULT_DATA_DIR "${ASTDATADIR}"
+#define DEFAULT_KEY_DIR "${ASTDATADIR}/keys"
-#define DEFAULT_SPOOL_DIR "${INSTALL_PATH}${ASTSPOOLDIR}"
-#define DEFAULT_TMP_DIR "${INSTALL_PATH}${ASTSPOOLDIR}/tmp"
+#define DEFAULT_SPOOL_DIR "${ASTSPOOLDIR}"
+#define DEFAULT_TMP_DIR "${ASTSPOOLDIR}/tmp"
#define DEFAULT_SBIN_DIR "${INSTALL_PATH}${ASTSBINDIR}"
END

View file

@ -0,0 +1,34 @@
# chan-sccp: out-of-tree SCCP channel driver module for Asterisk.
{ lib, stdenv, fetchFromGitHub, binutils-unwrapped, patchelf, asterisk }:
stdenv.mkDerivation rec {
pname = "asterisk-module-sccp";
version = "4.3.4";
src = fetchFromGitHub {
owner = "chan-sccp";
repo = "chan-sccp";
rev = "v${version}";
sha256 = "sha256-YGHK4A03Ba/tnVTnu9VuhIy/xQ5C/7ZX8h9mxqKsnZI=";
};
nativeBuildInputs = [ patchelf ];
# Build against the packaged asterisk's headers/layout.
configureFlags = [ "--with-asterisk=${asterisk}" ];
# Install into a staging dir; the real layout is assembled in postInstall.
installFlags = [ "DESTDIR=/build/dest" "DATAROOTDIR=/build/dest" ];
postInstall = ''
mkdir -p "$out"
cp -r /build/dest/${asterisk}/* "$out"
'';
# Fix the module's rpath so it resolves its libraries at load time.
postFixup = ''
p="$out/lib/asterisk/modules/chan_sccp.so"
patchelf --set-rpath "$p:${lib.makeLibraryPath [ binutils-unwrapped ]}" "$p"
'';
meta = with lib; {
description = "Replacement for the SCCP channel driver in Asterisk";
license = licenses.gpl1Only;
maintainers = with maintainers; [ das_j ];
};
}

41
pkgs/servers/asterisk/update.py Executable file
View file

@ -0,0 +1,41 @@
#!/usr/bin/env nix-shell
#!nix-shell -i python3 -p python39 python39.pkgs.packaging python39.pkgs.beautifulsoup4 python39.pkgs.requests
# mirrored in ./default.nix
#
# Regenerates versions.json with the newest release of each Asterisk major
# series, discovered by scraping the upstream downloads index.
from packaging import version
from bs4 import BeautifulSoup
import re, requests, json
from pathlib import Path

URL = "https://downloads.asterisk.org/pub/telephony/asterisk"

page = requests.get(URL)

# One ChangeLog-X.Y.Z file is published per release, so the ChangeLog links
# enumerate every released version. Raw string: "\d" is an invalid escape
# in a plain string literal.
changelog_re = re.compile(r"^ChangeLog-\d+\.\d+\.\d+$")
changelogs = [a.get_text() for a in BeautifulSoup(page.text, 'html.parser').find_all('a') if changelog_re.match(a.get_text())]

# Group releases by major series (16, 18, 19, ...).
major_versions = {}
for name in changelogs:
    v = version.parse(name.removeprefix("ChangeLog-"))
    major_versions.setdefault(v.major, []).append(v)

out = {}
for mv, releases in major_versions.items():
    v = max(releases)
    # Upstream publishes a .sha256 checksum file next to each tarball.
    sha = requests.get(f"{URL}/asterisk-{v}.sha256").text.split()[0]
    out["asterisk_" + str(mv)] = {
        "version": str(v),
        "sha256": sha
    }

# __file__ is more robust than sys.argv[0] (works regardless of how the
# interpreter was invoked).
versions_path = Path(__file__).parent / "versions.json"

try:
    # Print old -> new versions for the PR description.
    with open(versions_path, "r") as in_file:
        in_data = json.loads(in_file.read())
    for v in in_data.keys():
        print(v + ":", in_data[v]["version"], "->", out[v]["version"])
except Exception:
    # nice to have for the PR, not a requirement; narrowed from a bare
    # `except:` so Ctrl-C / SystemExit are not swallowed
    pass

with open(versions_path, "w") as out_file:
    out_file.write(json.dumps(out, sort_keys=True, indent=2) + "\n")

View file

@ -0,0 +1,14 @@
{
"asterisk_16": {
"sha256": "201c92e591fc1db2c71b264907beef594d62d660168d42b5e83f9dc593b1bce0",
"version": "16.26.1"
},
"asterisk_18": {
"sha256": "acbb58e5c3cd2b9c7c4506fa80b717c3c3c550ce9722ff0177b4f11f98725563",
"version": "18.12.1"
},
"asterisk_19": {
"sha256": "6b0b985163f20fcc8f8878069b8a9ee725eef4cfbdb1c1031fe3840fb32d7abe",
"version": "19.4.1"
}
}

View file

@ -0,0 +1,38 @@
# Atlassian Bamboo: prebuilt binary distribution repackaged for NixOS;
# mutable state and config are redirected to /run paths via symlinks.
{ stdenvNoCC, lib, fetchurl, mysql_jdbc
, withMysql ? true
}:
stdenvNoCC.mkDerivation rec {
  pname = "atlassian-bamboo";
  version = "8.1.4";
  src = fetchurl {
    url = "https://product-downloads.atlassian.com/software/bamboo/downloads/atlassian-bamboo-${version}.tar.gz";
    sha256 = "sha256-v30Q3yGKkpHQFitOcH764SE6KuCdUJWn50buY7pb/Ng=";
  };
  buildPhase = ''
    echo "bamboo.home=/run/bamboo/home" > atlassian-bamboo/WEB-INF/classes/bamboo-init.properties
    mv conf/server.xml conf/server.xml.dist
    ln -sf /run/atlassian-bamboo/server.xml conf/server.xml
    rm -r logs; ln -sf /run/atlassian-bamboo/logs/ .
    rm -r temp; ln -sf /run/atlassian-bamboo/temp/ .
    rm -r work; ln -sf /run/atlassian-bamboo/work/ .
  '' + lib.optionalString withMysql ''
    cp -v ${mysql_jdbc}/share/java/*jar atlassian-bamboo/lib/
  '';
  installPhase = ''
    cp -rva . $out
    patchShebangs $out/bin
  '';
  meta = with lib; {
    # nixpkgs convention: no package name prefix, no trailing period.
    description = "Continuous delivery server";
    homepage = "https://www.atlassian.com/software/bamboo";
    sourceProvenance = with sourceTypes; [ binaryBytecode ];
    # plain licenses.unfree, consistent with the other Atlassian packages
    license = licenses.unfree;
    maintainers = with maintainers; [ techknowlogick ];
  };
}

View file

@ -0,0 +1,50 @@
# Atlassian Confluence: prebuilt distribution; state redirected to /run paths.
{ stdenvNoCC, lib, fetchurl, mysql_jdbc ? null
, enableSSO ? false
, crowdProperties ? null
, withMysql ? true
}:
assert withMysql -> (mysql_jdbc != null);
stdenvNoCC.mkDerivation rec {
pname = "atlassian-confluence";
version = "7.18.1";
src = fetchurl {
url = "https://product-downloads.atlassian.com/software/confluence/downloads/${pname}-${version}.tar.gz";
sha256 = "sha256-MEq1ASnJUYWPvt7Z30+fUTv+QrDI+Xsb5e9K0c8ZtdQ=";
};
# Point home, server.xml, logs/work/temp at runtime-writable /run locations.
buildPhase = ''
echo "confluence.home=/run/confluence/home" > confluence/WEB-INF/classes/confluence-init.properties
mv conf/server.xml conf/server.xml.dist
ln -sf /run/confluence/home/deploy conf/Standalone
ln -sf /run/confluence/server.xml conf/server.xml
rm -r logs; ln -sf /run/confluence/logs/ .
rm -r work; ln -sf /run/confluence/work/ .
rm -r temp; ln -sf /run/confluence/temp/ .
'' + lib.optionalString enableSSO ''
substituteInPlace confluence/WEB-INF/classes/seraph-config.xml \
--replace com.atlassian.confluence.user.ConfluenceAuthenticator\
com.atlassian.confluence.user.ConfluenceCrowdSSOAuthenticator
'' + lib.optionalString (crowdProperties != null) ''
cat <<EOF > confluence/WEB-INF/classes/crowd.properties
${crowdProperties}
EOF
'' + lib.optionalString withMysql ''
cp -v ${mysql_jdbc}/share/java/*jar confluence/WEB-INF/lib/
'';
installPhase = ''
cp -rva . $out
patchShebangs $out/bin
'';
meta = with lib; {
description = "Team collaboration software written in Java and mainly used in corporate environments";
homepage = "https://www.atlassian.com/software/confluence";
sourceProvenance = with sourceTypes; [ binaryBytecode ];
license = licenses.unfree;
maintainers = with maintainers; [ fpletz globin willibutz ciil techknowlogick ];
};
}

View file

@ -0,0 +1,49 @@
# Atlassian Crowd: prebuilt distribution; state redirected to /run paths.
{ lib, stdenv, fetchurl, home ? "/var/lib/crowd"
, port ? 8092, proxyUrl ? null, openidPassword ? "WILL_NEVER_BE_SET" }:
stdenv.mkDerivation rec {
pname = "atlassian-crowd";
version = "4.4.0";
src = fetchurl {
url = "https://www.atlassian.com/software/crowd/downloads/binary/${pname}-${version}.tar.gz";
sha256 = "0ipfvdjs8v02y37rmihljy9lkb3ycz5hyc14mcg65ilsscsq3x91";
};
# Redirect mutable tomcat dirs to /run and bake in the configured port/home.
buildPhase = ''
mv apache-tomcat/conf/server.xml apache-tomcat/conf/server.xml.dist
ln -s /run/atlassian-crowd/server.xml apache-tomcat/conf/server.xml
rm -rf apache-tomcat/{logs,work}
ln -s /run/atlassian-crowd/logs apache-tomcat/logs
ln -s /run/atlassian-crowd/work apache-tomcat/work
ln -s /run/atlassian-crowd/database database
substituteInPlace apache-tomcat/bin/startup.sh --replace start run
echo "crowd.home=${home}" > crowd-webapp/WEB-INF/classes/crowd-init.properties
substituteInPlace build.properties \
--replace "openidserver.url=http://localhost:8095/openidserver" \
"openidserver.url=http://localhost:${toString port}/openidserver"
substituteInPlace crowd-openidserver-webapp/WEB-INF/classes/crowd.properties \
--replace "http://localhost:8095/" \
"http://localhost:${toString port}/"
sed -r -i crowd-openidserver-webapp/WEB-INF/classes/crowd.properties \
-e 's,application.password\s+password,application.password ${openidPassword},'
'' + lib.optionalString (proxyUrl != null) ''
sed -i crowd-openidserver-webapp/WEB-INF/classes/crowd.properties \
-e 's,http://localhost:${toString port}/openidserver,${proxyUrl}/openidserver,'
'';
installPhase = ''
cp -rva . $out
'';
meta = with lib; {
description = "Single sign-on and identity management tool";
homepage = "https://www.atlassian.com/software/crowd";
license = licenses.unfree;
maintainers = with maintainers; [ fpletz globin ];
};
}

View file

@ -0,0 +1,46 @@
{ stdenv
, lib
, fetchurl
, gawk
, enableSSO ? false
, crowdProperties ? null
}:
stdenv.mkDerivation rec {
pname = "atlassian-jira";
version = "8.22.2";
src = fetchurl {
url = "https://product-downloads.atlassian.com/software/jira/downloads/atlassian-jira-software-${version}.tar.gz";
sha256 = "sha256-j9JUIK4GOdY9rMLPZcWbjWUh/s2ZkoVEQBNAIqHhdYI=";
};
buildPhase = ''
mv conf/server.xml conf/server.xml.dist
ln -sf /run/atlassian-jira/server.xml conf/server.xml
rm -r logs; ln -sf /run/atlassian-jira/logs/ .
rm -r work; ln -sf /run/atlassian-jira/work/ .
rm -r temp; ln -sf /run/atlassian-jira/temp/ .
substituteInPlace bin/check-java.sh \
--replace "awk" "${gawk}/bin/gawk"
'' + lib.optionalString enableSSO ''
substituteInPlace atlassian-jira/WEB-INF/classes/seraph-config.xml \
--replace com.atlassian.jira.security.login.JiraSeraphAuthenticator \
com.atlassian.jira.security.login.SSOSeraphAuthenticator
'' + lib.optionalString (crowdProperties != null) ''
cat <<EOF > atlassian-jira/WEB-INF/classes/crowd.properties
${crowdProperties}
EOF
'';
installPhase = ''
cp -rva . $out
'';
meta = with lib; {
description = "Proprietary issue tracking product, also providing project management functions";
homepage = "https://www.atlassian.com/software/jira";
license = licenses.unfree;
maintainers = with maintainers; [ fpletz globin ciil megheaiulian techknowlogick ];
};
}

View file

@ -0,0 +1,39 @@
# bazarr: subtitle manager, shipped by upstream as a zip of Python sources.
{ stdenv, lib, fetchurl, makeWrapper, unzip, python3, unrar, ffmpeg, nixosTests }:
stdenv.mkDerivation rec {
pname = "bazarr";
version = "1.0.3";
# The zip has no top-level directory, so unpack into the build root.
sourceRoot = ".";
src = fetchurl {
url = "https://github.com/morpheus65535/bazarr/releases/download/v${version}/bazarr.zip";
sha256 = "sha256-VApcTYARC6NaVmwXgpzW8xRE23refGudBgPsyq7Ypig=";
};
nativeBuildInputs = [ unzip makeWrapper ];
# Copy the sources and wrap a python interpreter that runs bazarr.py,
# with unrar/ffmpeg appended to PATH for runtime use.
installPhase = ''
mkdir -p $out/{bin,share/${pname}-${version}}
cp -r * $out/share/${pname}-${version}
makeWrapper "${
(python3.withPackages
(ps: [ ps.lxml ps.numpy ps.gevent ps.gevent-websocket ])).interpreter
}" \
$out/bin/bazarr \
--add-flags "$out/share/${pname}-${version}/bazarr.py" \
--suffix PATH : ${lib.makeBinPath [ unrar ffmpeg ]}
'';
passthru.tests = {
smoke-test = nixosTests.bazarr;
};
meta = with lib; {
description = "Subtitle manager for Sonarr and Radarr";
homepage = "https://www.bazarr.media/";
license = licenses.gpl3Only;
maintainers = with maintainers; [ d-xo ];
platforms = platforms.all;
};
}

View file

@ -0,0 +1,35 @@
# beanstalkd: simple work-queue daemon, built straight from the Makefile.
{ lib, stdenv, fetchFromGitHub, installShellFiles, nixosTests }:
stdenv.mkDerivation rec {
  pname = "beanstalkd";
  version = "1.12";

  src = fetchFromGitHub {
    owner = "kr";
    repo = "beanstalkd";
    rev = "v${version}";
    hash = "sha256-HChpVZ02l08CObrb4+ZEjBiXeQMMYi6zhSWUTDxuEao=";
  };

  nativeBuildInputs = [ installShellFiles ];

  hardeningDisable = [ "fortify" ];
  makeFlags = [ "PREFIX=${placeholder "out"}" ];

  # The Makefile does not install the man page, so do it ourselves.
  postInstall = ''
    installManPage doc/beanstalkd.1
  '';

  passthru.tests.smoke-test = nixosTests.beanstalkd;

  meta = with lib; {
    description = "A simple, fast work queue";
    homepage = "http://kr.github.io/beanstalkd/";
    license = licenses.mit;
    maintainers = with maintainers; [ zimbatm ];
    platforms = platforms.all;
  };
}

View file

@ -0,0 +1,42 @@
# bird-lg: looking-glass for the BIRD routing daemon. The repo contains two
# Go modules (frontend and proxy); both are built and joined into one output.
{ buildGoModule, fetchFromGitHub, lib, symlinkJoin }:
let
# Shared builder parameterized on the module subdirectory and its vendor hash.
generic = { modRoot, vendorSha256 }:
buildGoModule rec {
pname = "bird-lg-${modRoot}";
version = "unstable-2022-05-08";
src = fetchFromGitHub {
owner = "xddxdd";
repo = "bird-lg-go";
rev = "348295b9aa954a92df2cf6b1179846a9486dafc0";
sha256 = "sha256-2t8ZP9Uc0sJlqWiJMq3MVoARfMKsuTXJkuOid0oWgyY=";
};
doDist = false;
# Strip symbol and debug info from the binaries.
ldflags = [
"-s"
"-w"
];
inherit modRoot vendorSha256;
meta = with lib; {
description = "Bird Looking Glass";
homepage = "https://github.com/xddxdd/bird-lg-go";
license = licenses.gpl3Plus;
maintainers = with maintainers; [ tchekda ];
};
};
bird-lg-frontend = generic {
modRoot = "frontend";
vendorSha256 = "sha256-WKuVGiSV5LZrJ8/672TRN6tZNQxdCktHV6nx0ZxCP4A=";
};
bird-lg-proxy = generic {
modRoot = "proxy";
vendorSha256 = "sha256-7LZeCY4xSxREsQ+Dc2XSpu2ZI8CLE0mz0yoThP7/OO4=";
};
in
symlinkJoin { name = "bird-lg"; paths = [ bird-lg-frontend bird-lg-proxy ]; }

View file

@ -0,0 +1,39 @@
{ lib, stdenv, fetchurl, flex, bison, readline, libssh, nixosTests }:

stdenv.mkDerivation rec {
  pname = "bird";
  version = "2.0.9";

  src = fetchurl {
    url = "ftp://bird.network.cz/pub/bird/${pname}-${version}.tar.gz";
    sha256 = "sha256-dnhrvN7TBh4bsiGwEfLMACIewGPenNoASn2bBhoJbV4=";
  };

  nativeBuildInputs = [ flex bison ];
  buildInputs = [ readline libssh ];

  patches = [
    # Skip creating the (impure) run-state directory during `make install`.
    ./dont-create-sysconfdir-2.patch
    # Upstream fix, fetched with fetchurl rather than fetchpatch on purpose:
    # the pinned sha256 matches the raw patch file, and fetchpatch's
    # normalization would change the hash.  (The previously imported but
    # unused `fetchpatch` input has been dropped.)
    (fetchurl {
      url = "https://gitlab.nic.cz/labs/bird/-/commit/fcb4dd0c831339c4374ace17d8f2ae6ebfeed279.patch";
      sha256 = "sha256-PEgpRnOGLa1orHJDEHlblnVhBVv7XOKPR70M1wUMxMQ=";
    })
  ];

  # Use the preprocessor matching the (possibly cross) compiler.
  CPP = "${stdenv.cc.targetPrefix}cpp -E";

  configureFlags = [
    "--localstatedir=/var"
    "--runstatedir=/run/bird"
  ];

  passthru.tests = nixosTests.bird;

  meta = with lib; {
    description = "BIRD Internet Routing Daemon";
    homepage = "http://bird.network.cz";
    license = licenses.gpl2Plus;
    maintainers = with maintainers; [ fpletz globin ];
    platforms = platforms.linux;
  };
}

View file

@ -0,0 +1,6 @@
--- a/Makefile.in
+++ b/Makefile.in
@@ -165,2 +165,2 @@
install: all
- $(INSTALL) -d $(DESTDIR)/$(sbindir) $(DESTDIR)/$(sysconfdir) $(DESTDIR)/$(runstatedir)
+ $(INSTALL) -d $(DESTDIR)/$(sbindir) $(DESTDIR)/$(sysconfdir)

View file

@ -0,0 +1,42 @@
{ lib, buildGoModule, fetchgit, unstableGitUpdater }:

buildGoModule {
  pname = "bloat";
  version = "unstable-2022-05-10";

  src = fetchgit {
    url = "git://git.freesoftwareextremist.com/bloat";
    rev = "1661219ab6e3c12b29d676d57ce452feb81d0dd9";
    sha256 = "sha256-Vb0WTRYPv0+g0by+h09sDDMVCjRYF28PwbXJNkdX6NA=";
  };

  # vendorSha256 = null: Go dependencies are vendored in the source tree.
  vendorSha256 = null;

  # Install the templates and static assets, and produce an example
  # configuration that points at their store locations.
  postInstall = ''
    mkdir -p $out/share/bloat
    cp -r templates $out/share/bloat/templates
    cp -r static $out/share/bloat/static
    sed \
      -e "s%=templates%=$out/share/bloat/templates%g" \
      -e "s%=static%=$out/share/bloat/static%g" \
      < bloat.conf > $out/share/bloat/bloat.conf.example
  '';

  passthru.updateScript = unstableGitUpdater { };

  meta = with lib; {
    description = "A web client for Pleroma and Mastodon";
    longDescription = ''
      A lightweight web client for Pleroma and Mastodon.
      Does not require JavaScript to display text, images, audio and videos.
    '';
    homepage = "https://bloat.freesoftwareextremist.com";
    downloadPage = "https://git.freesoftwareextremist.com/bloat/";
    license = licenses.cc0;
    maintainers = with maintainers; [ fgaz ];
  };
}

View file

@ -0,0 +1,71 @@
{ lib
, stdenv
, buildGoModule
, fetchFromGitHub
, pkg-config
, bzip2
, lz4
, rocksdb_6_23
, snappy
, zeromq
, zlib
, nixosTests
}:
let
  # Pin the rocksdb release blockbook links against (see the
  # "rocksdb_6_16" build tag below) — TODO confirm exact compatible range.
  rocksdb = rocksdb_6_23;
in
buildGoModule rec {
  pname = "blockbook";
  version = "0.3.6";
  # Short upstream commit hash, embedded into the binary via ldflags below.
  commit = "5f8cf45";

  src = fetchFromGitHub {
    owner = "trezor";
    repo = "blockbook";
    rev = "v${version}";
    sha256 = "1jb195chy3kbspmv9vyg7llw6kgykkmvz3znd97mxf24f4q622jv";
  };

  vendorSha256 = "1w9c0qzah2f9rbjdxqajwrfkia25cwbn30gidviaid3b7ddpd7r8";

  nativeBuildInputs = [ pkg-config ];
  buildInputs = [ bzip2 lz4 rocksdb snappy zeromq zlib ];

  # Version metadata stamped into the binary at link time.
  ldflags = [
    "-X github.com/trezor/blockbook/common.version=${version}"
    "-X github.com/trezor/blockbook/common.gitcommit=${commit}"
    "-X github.com/trezor/blockbook/common.buildDate=unknown"
  ];

  tags = [ "rocksdb_6_16" ];

  # NOTE(review): the darwin ulimit raise is presumably to avoid fd
  # exhaustion during the cgo build — confirm.
  # NOTE(review): buildGoModule also consumes the `tags`/`ldflags`
  # attributes above; appending them again through buildFlagsArray may
  # pass the flags twice — verify against the buildGoModule version in use.
  preBuild = lib.optionalString stdenv.isDarwin ''
    ulimit -n 8192
  '' + ''
    export CGO_LDFLAGS="-L${stdenv.cc.cc.lib}/lib -lrocksdb -lz -lbz2 -lsnappy -llz4 -lm -lstdc++"
    buildFlagsArray+=("-tags=${lib.concatStringsSep " " tags}")
    buildFlagsArray+=("-ldflags=${lib.concatStringsSep " " ldflags}")
  '';

  # Only build the top-level package (the blockbook server itself).
  subPackages = [ "." ];

  # Ship the HTML templates and stylesheets the server serves at runtime.
  postInstall = ''
    mkdir -p $out/share/
    cp -r $src/static/templates/ $out/share/
    cp -r $src/static/css/ $out/share/
  '';

  passthru.tests = {
    smoke-test = nixosTests.blockbook-frontend;
  };

  meta = with lib; {
    description = "Trezor address/account balance backend";
    homepage = "https://github.com/trezor/blockbook";
    license = licenses.agpl3;
    maintainers = with maintainers; [ mmahut _1000101 ];
    platforms = platforms.unix;
  };
}

View file

@ -0,0 +1,62 @@
{ lib, stdenv, fetchFromGitHub, libusb1, pkg-config, pmutils, udev }:

let
  version = "2.1.1";

  # brickd builds against Tinkerforge's daemonlib; upstream tags a matching
  # daemonlib release ("brickd-<version>") for every brickd release.
  daemonlib = fetchFromGitHub {
    owner = "Tinkerforge";
    repo = "daemonlib";
    rev = "brickd-${version}";
    sha256 = "sha256-0HhuC4r1S4NJa2FSJa7+fNCfcoRTBckikYbGSE+2FbE=";
  };
in
stdenv.mkDerivation {
  pname = "brickd";
  inherit version;

  src = fetchFromGitHub {
    owner = "Tinkerforge";
    repo = "brickd";
    rev = "v${version}";
    sha256 = "sha256-6w2Ew+dLMmdRf9CF3TdKHa0d5ZgmX5lKIR+5t3QAWFQ=";
  };

  nativeBuildInputs = [ pkg-config ];
  buildInputs = [ libusb1 pmutils udev ];

  # The Makefile locates pkg-config via `$(shell which ...)`, which the
  # original author reported not working here; pin it to the store path
  # instead.  The previous substitution was broken twice over: it
  # interpolated the undefined shell variable `$pkgconfig` (the input is
  # named `pkg-config`, which cannot be a shell variable) and referenced a
  # non-existent binary name `pkg_config` (the binary is `pkg-config`).
  prePatch = ''
    substituteInPlace src/brickd/Makefile \
      --replace 'PKG_CONFIG := $(shell which pkg-config 2> /dev/null)' \
                'PKG_CONFIG := ${pkg-config}/bin/pkg-config'
  '';

  buildPhase = ''
    # Stage the vendored daemonlib sources where the build expects them,
    # then build the brickd binary.  (A stray bare `export`, which dumped
    # the whole environment into the log, has been removed.)
    mkdir src/daemonlib
    cp -r ${daemonlib}/* src/daemonlib
    cd src/brickd
    make

    # Build and run the upstream unit tests.
    cd ../tests
    make
    for i in array_test base58_test node_test putenv_test queue_test sha1_test; do
      echo "running unit test $i:"
      ./$i
    done
  '';

  installPhase = ''
    cd ../brickd
    mkdir -p $out/bin
    cp brickd $out/bin/brickd
  '';

  meta = with lib; {
    homepage = "https://www.tinkerforge.com/";
    description = "A daemon (or service on Windows) that acts as a bridge between the Bricks/Bricklets and the API bindings for the different programming languages";
    maintainers = [ maintainers.qknight ];
    license = licenses.gpl2;
    platforms = platforms.all;
  };
}

View file

@ -0,0 +1,41 @@
{ lib, buildGoModule, fetchFromGitHub, nixosTests }:

let
  version = "2.5.1";

  # Systemd unit files are maintained in the separate caddyserver/dist
  # repository, pinned to the matching release tag.
  dist = fetchFromGitHub {
    owner = "caddyserver";
    repo = "dist";
    rev = "v${version}";
    sha256 = "sha256-EXs+LNb87RWkmSWvs8nZIVqRJMutn+ntR241gqI7CUg=";
  };
in
buildGoModule {
  pname = "caddy";
  inherit version;

  src = fetchFromGitHub {
    owner = "caddyserver";
    repo = "caddy";
    rev = "v${version}";
    sha256 = "sha256-Y4GAx/8XcW7+6eXCQ6k4e/3WZ/6MkTr5za1AXp6El9o=";
  };

  vendorSha256 = "sha256-xu3klc9yb4Ws8fvXRV286IDhi/zQVN1PKCiFKb8VJBo=";

  # Only the server binary itself.
  subPackages = [ "cmd/caddy" ];

  # Install the upstream systemd units and point them at our caddy binary.
  postInstall = ''
    install -Dm644 ${dist}/init/caddy.service ${dist}/init/caddy-api.service -t $out/lib/systemd/system
    substituteInPlace $out/lib/systemd/system/caddy.service --replace "/usr/bin/caddy" "$out/bin/caddy"
    substituteInPlace $out/lib/systemd/system/caddy-api.service --replace "/usr/bin/caddy" "$out/bin/caddy"
  '';

  passthru.tests = { inherit (nixosTests) caddy; };

  meta = with lib; {
    homepage = "https://caddyserver.com";
    description = "Fast, cross-platform HTTP/2 web server with automatic HTTPS";
    license = licenses.asl20;
    maintainers = with maintainers; [ Br1ght0ne techknowlogick ];
  };
}

View file

@ -0,0 +1,14 @@
diff --git a/cps/__init__.py b/cps/__init__.py
index 627cca0b..233bb2dd 100644
--- a/cps/__init__.py
+++ b/cps/__init__.py
@@ -87,6 +87,9 @@ db.CalibreDB.setup_db(config, cli.settingspath)
calibre_db = db.CalibreDB()
+if os.environ.get('__RUN_MIGRATIONS_AND_EXIT'):
+ sys.exit(0)
+
def create_app():
app.wsgi_app = ReverseProxied(app.wsgi_app)
# For python2 convert path to unicode

View file

@ -0,0 +1,17 @@
diff --git a/cps/logger.py b/cps/logger.py
index b204de31..3206e2bf 100644
--- a/cps/logger.py
+++ b/cps/logger.py
@@ -32,10 +32,10 @@ ACCESS_FORMATTER_TORNADO = Formatter("[%(asctime)s] %(message)s")
FORMATTER = Formatter("[%(asctime)s] %(levelname)5s {%(name)s:%(lineno)d} %(message)s")
DEFAULT_LOG_LEVEL = logging.INFO
-DEFAULT_LOG_FILE = os.path.join(_CONFIG_DIR, "calibre-web.log")
-DEFAULT_ACCESS_LOG = os.path.join(_CONFIG_DIR, "access.log")
LOG_TO_STDERR = '/dev/stderr'
LOG_TO_STDOUT = '/dev/stdout'
+DEFAULT_LOG_FILE = LOG_TO_STDOUT
+DEFAULT_ACCESS_LOG = LOG_TO_STDOUT
logging.addLevelName(logging.WARNING, "WARN")
logging.addLevelName(logging.CRITICAL, "CRIT")

View file

@ -0,0 +1,80 @@
{ lib
, fetchFromGitHub
, nixosTests
, python3
}:

python3.pkgs.buildPythonApplication rec {
  pname = "calibre-web";
  version = "0.6.18";

  src = fetchFromGitHub {
    owner = "janeczku";
    repo = "calibre-web";
    rev = version;
    sha256 = "sha256-KjmpFetNhNM5tL34e/Pn1i3hc86JZglubSMsHZWu198=";
  };

  # Take every dependency from python3.pkgs — the same set
  # buildPythonApplication uses — instead of the separately-passed
  # python3Packages, which could diverge from it under overrides.
  propagatedBuildInputs = with python3.pkgs; [
    advocate
    backports_abc
    flask-babel
    flask_login
    flask_principal
    flask-wtf
    iso-639
    lxml
    pypdf3
    requests
    sqlalchemy
    tornado
    unidecode
    Wand
    werkzeug
  ];

  patches = [
    # default-logger.patch switches default logger to /dev/stdout. Otherwise calibre-web tries to open a file relative
    # to its location, which can't be done as the store is read-only. Log file location can later be configured using UI
    # if needed.
    ./default-logger.patch
    # DB migrations adds an env var __RUN_MIGRATIONS_AND_EXIT that, when set, instructs calibre-web to run DB migrations
    # and exit. This is gonna be used to configure calibre-web declaratively, as most of its configuration parameters
    # are stored in the DB.  (The variable name in this comment previously
    # disagreed with the one the patch actually defines.)
    ./db-migrations.patch
  ];

  # calibre-web doesn't follow setuptools directory structure. The following is taken from the script
  # that calibre-web's maintainer is using to package it:
  # https://github.com/OzzieIsaacs/calibre-web-test/blob/master/build/make_release.py
  postPatch = ''
    mkdir -p src/calibreweb
    mv cps.py src/calibreweb/__init__.py
    mv cps src/calibreweb

    substituteInPlace setup.cfg \
      --replace "cps = calibreweb:main" "calibre-web = calibreweb:main" \
      --replace "Flask>=1.0.2,<2.1.0" "Flask>=1.0.2" \
      --replace "Flask-Login>=0.3.2,<0.5.1" "Flask-Login>=0.3.2" \
      --replace "flask-wtf>=0.14.2,<1.1.0" "flask-wtf>=0.14.2" \
      --replace "lxml>=3.8.0,<4.9.0" "lxml>=3.8.0" \
      --replace "PyPDF3>=1.0.0,<1.0.7" "PyPDF3>=1.0.0" \
      --replace "requests>=2.11.1,<2.28.0" "requests" \
      --replace "unidecode>=0.04.19,<1.4.0" "unidecode>=0.04.19" \
      --replace "werkzeug<2.1.0" ""
  '';

  # Upstream repo doesn't provide any tests.
  doCheck = false;

  passthru.tests.calibre-web = nixosTests.calibre-web;

  meta = with lib; {
    description = "Web app for browsing, reading and downloading eBooks stored in a Calibre database";
    homepage = "https://github.com/janeczku/calibre-web";
    license = licenses.gpl3Plus;
    maintainers = with maintainers; [ pborzenkov ];
    platforms = platforms.all;
  };
}

View file

@ -0,0 +1,29 @@
{ lib, buildGoPackage, fetchFromGitHub }:

buildGoPackage rec {
  pname = "cayley";
  version = "0.7.5";

  src = fetchFromGitHub {
    owner = "cayleygraph";
    repo = "cayley";
    rev = "v${version}";
    sha256 = "1zfxa9z6spi6xw028mvbc7c3g517gn82g77ywr6picl47fr2blnd";
  };

  goPackagePath = "github.com/cayleygraph/cayley";

  # Pinned dependency set generated from upstream's Gopkg.lock.
  goDeps = ./deps.nix;

  # Stamp the release version into the binary.
  ldflags = [ "-X=main.Version=${version}" ];

  meta = with lib; {
    description = "A graph database inspired by Freebase and Knowledge Graph";
    homepage = "https://github.com/cayleygraph/cayley";
    license = licenses.asl20;
    maintainers = with maintainers; [ sigma ];
    platforms = platforms.unix;
  };
}

471
pkgs/servers/cayley/deps.nix generated Normal file
View file

@ -0,0 +1,471 @@
# file generated from Gopkg.lock using dep2nix (https://github.com/nixcloud/dep2nix)
[
{
goPackagePath = "github.com/badgerodon/peg";
fetch = {
type = "git";
url = "https://github.com/badgerodon/peg";
rev = "9e5f7f4d07ca576562618c23e8abadda278b684f";
sha256 = "12vd7hzdgknn8byz77lmvcrz9m5lvmffdnz2wwk83304przkra11";
};
}
{
goPackagePath = "github.com/boltdb/bolt";
fetch = {
type = "git";
url = "https://github.com/boltdb/bolt";
rev = "e9cf4fae01b5a8ff89d0ec6b32f0d9c9f79aefdd";
sha256 = "1sjxzz88bw0y37mk3xvwb9j5v7bz3r80rwg79jml6liqk1arnl99";
};
}
{
goPackagePath = "github.com/cznic/mathutil";
fetch = {
type = "git";
url = "https://github.com/cznic/mathutil";
rev = "1447ad269d64ca91aa8d7079baa40b6fc8b965e7";
sha256 = "1r9c20k2h65g38yxf3vd46nbayx1cz5w4q4yr1xfggcs0mmrb87i";
};
}
{
goPackagePath = "github.com/davecgh/go-spew";
fetch = {
type = "git";
url = "https://github.com/davecgh/go-spew";
rev = "346938d642f2ec3594ed81d874461961cd0faa76";
sha256 = "0d4jfmak5p6lb7n2r6yvf5p1zcw0l8j74kn55ghvr7zr7b7axm6c";
};
}
{
goPackagePath = "github.com/dennwc/graphql";
fetch = {
type = "git";
url = "https://github.com/dennwc/graphql";
rev = "12cfed44bc5de083875506a36d30f9798f9bca47";
sha256 = "1rfsxjjsik5618y2741lcyw56a4d4l6r04sbj1igrvcck9bz0k6a";
};
}
{
goPackagePath = "github.com/dlclark/regexp2";
fetch = {
type = "git";
url = "https://github.com/dlclark/regexp2";
rev = "902a5ce7a7812e2ba9f73b9d96c09d5136df39cd";
sha256 = "0ypmdayq50ilbmqa1wjq5nvs9igbxkzlc8phlknw244935wz3v15";
};
}
{
goPackagePath = "github.com/dop251/goja";
fetch = {
type = "git";
url = "https://github.com/dop251/goja";
rev = "ef8c030e3c96c5054c2f10ef925e7041e0583c07";
sha256 = "15419apwdpbl0lgnl9xj9wyl05vpiz6jqgj8zbcyxhzy0wycj445";
};
}
{
goPackagePath = "github.com/fsnotify/fsnotify";
fetch = {
type = "git";
url = "https://github.com/fsnotify/fsnotify";
rev = "4da3e2cfbabc9f751898f250b49f2439785783a1";
sha256 = "1y2l9jaf99j6gidcfdgq3hifxyiwv4f7awpll80p170ixdbqxvl3";
};
}
{
goPackagePath = "github.com/go-kivik/couchdb";
fetch = {
type = "git";
url = "https://github.com/go-kivik/couchdb";
rev = "74d231fe43245e77840213724894264f0f61ffd3";
sha256 = "0ga6d6y44wg8ync73wcyc7q7r3sr5vdj5qkn3yqn9yn4p0k2w89i";
};
}
{
goPackagePath = "github.com/go-kivik/kivik";
fetch = {
type = "git";
url = "https://github.com/go-kivik/kivik";
rev = "2a1f6b9dd407886bc59c0c28faed28fbce3b0ece";
sha256 = "0fpa62mriyiyl5dh5kg8858bqrwiwscpbkg9np69lk302znxalij";
};
}
{
goPackagePath = "github.com/go-kivik/pouchdb";
fetch = {
type = "git";
url = "https://github.com/go-kivik/pouchdb";
rev = "bbd1ab79be17c809842e193b1f84e924b6b599ba";
sha256 = "15kv6i94j73c8zzy5hnmf051d3i65wxc07hvass9lc4g5ad7f9vf";
};
}
{
goPackagePath = "github.com/go-sourcemap/sourcemap";
fetch = {
type = "git";
url = "https://github.com/go-sourcemap/sourcemap";
rev = "b019cc30c1eaa584753491b0d8f8c1534bf1eb44";
sha256 = "03k44fdrnknba05f7cd58lq4rzk7jdpiqksmc0wxrdzwschrbgw8";
};
}
{
goPackagePath = "github.com/go-sql-driver/mysql";
fetch = {
type = "git";
url = "https://github.com/go-sql-driver/mysql";
rev = "147bd02c2c516cf9a8878cb75898ee8a9eea0228";
sha256 = "0s75nilz1jx0vgc69jgmys95lsq9j9nfdjcc8inc8mhzh3qpjb74";
};
}
{
goPackagePath = "github.com/gogo/protobuf";
fetch = {
type = "git";
url = "https://github.com/gogo/protobuf";
rev = "30433562cfbf487fe1df7cd26c7bab168d2f14d0";
sha256 = "155iv0jqgh0d8cykghw3ifwk8pjyyq1w4gr9khhf78n01k6180hj";
};
}
{
goPackagePath = "github.com/golang/glog";
fetch = {
type = "git";
url = "https://github.com/golang/glog";
rev = "23def4e6c14b4da8ac2ed8007337bc5eb5007998";
sha256 = "0jb2834rw5sykfr937fxi8hxi2zy80sj2bdn9b3jb4b26ksqng30";
};
}
{
goPackagePath = "github.com/golang/protobuf";
fetch = {
type = "git";
url = "https://github.com/golang/protobuf";
rev = "18c9bb3261723cd5401db4d0c9fbc5c3b6c70fe8";
sha256 = "0fbf8ymrcb23imkhlrlyq6i0x5w8gxzilljjsgd4hnvjgpgp3r4v";
};
}
{
goPackagePath = "github.com/golang/snappy";
fetch = {
type = "git";
url = "https://github.com/golang/snappy";
rev = "553a641470496b2327abcac10b36396bd98e45c9";
sha256 = "0kssxnih1l722hx9219c7javganjqkqhvl3i0hp0hif6xm6chvqk";
};
}
{
goPackagePath = "github.com/gopherjs/gopherjs";
fetch = {
type = "git";
url = "https://github.com/gopherjs/gopherjs";
rev = "558a9132744c22476178edf3126fd35a9754f565";
sha256 = "13mn0li83amgm4fgsm6l3shs2r4kjddr10xn0ydnr9ymg1y887vi";
};
}
{
goPackagePath = "github.com/gopherjs/jsbuiltin";
fetch = {
type = "git";
url = "https://github.com/gopherjs/jsbuiltin";
rev = "67703bfb044e3192fbcab025c3aeaeedafad1f2f";
sha256 = "1k0df0z9fiyzbr1g1736zdp238j9z82q3gwkk060h2n84rg4c7lh";
};
}
{
goPackagePath = "github.com/hashicorp/hcl";
fetch = {
type = "git";
url = "https://github.com/hashicorp/hcl";
rev = "7fa7fff964d035e8a162cce3a164b3ad02ad651b";
sha256 = "0p3dyhpc0ajakcww3a45n750z2030xqhlswzf51d5rzid27681wp";
};
}
{
goPackagePath = "github.com/imdario/mergo";
fetch = {
type = "git";
url = "https://github.com/imdario/mergo";
rev = "0d4b488675fdec1dde48751b05ab530cf0b630e1";
sha256 = "071rram7aib70f3gk4ansgwns82w9i6m1px8mgc8x4rs9ana4qhf";
};
}
{
goPackagePath = "github.com/inconshreveable/mousetrap";
fetch = {
type = "git";
url = "https://github.com/inconshreveable/mousetrap";
rev = "76626ae9c91c4f2a10f34cad8ce83ea42c93bb75";
sha256 = "1mn0kg48xkd74brf48qf5hzp0bc6g8cf5a77w895rl3qnlpfw152";
};
}
{
goPackagePath = "github.com/jackc/pgx";
fetch = {
type = "git";
url = "https://github.com/jackc/pgx";
rev = "606697ffdfe6603013560dbc171656de57b4f542";
sha256 = "0818yb2vjjwwmscdab7wnxbyiabvy544icdczdlr5kswbqq5h25m";
};
}
{
goPackagePath = "github.com/julienschmidt/httprouter";
fetch = {
type = "git";
url = "https://github.com/julienschmidt/httprouter";
rev = "6f3f3919c8781ce5c0509c83fffc887a7830c938";
sha256 = "1hmqdpv2zywwglmnjnxfn27mkac81n3nqs1wandlpybsww4vn4kx";
};
}
{
goPackagePath = "github.com/lib/pq";
fetch = {
type = "git";
url = "https://github.com/lib/pq";
rev = "2704adc878c21e1329f46f6e56a1c387d788ff94";
sha256 = "160fmvi7bczxw3i3h5s821hv029ph5ld8x3c36b4cz2sr30wp110";
};
}
{
goPackagePath = "github.com/linkeddata/gojsonld";
fetch = {
type = "git";
url = "https://github.com/linkeddata/gojsonld";
rev = "4f5db6791326b8962ede4edbba693edcf20fd1ad";
sha256 = "11g1kygkn55whaf49q2bzxk0w8b3nhdhiaixsj2ik65j8bl9g2cq";
};
}
{
goPackagePath = "github.com/magiconair/properties";
fetch = {
type = "git";
url = "https://github.com/magiconair/properties";
rev = "51463bfca2576e06c62a8504b5c0f06d61312647";
sha256 = "0d7hr78y8gg2mrm5z4jjgm2w3awkznz383b7wvyzk3l33jw6i288";
};
}
{
goPackagePath = "github.com/mitchellh/mapstructure";
fetch = {
type = "git";
url = "https://github.com/mitchellh/mapstructure";
rev = "cc8532a8e9a55ea36402aa21efdf403a60d34096";
sha256 = "0705c0hq7b993sabnjy65yymvpy9w1j84bg9bjczh5607z16nw86";
};
}
{
goPackagePath = "github.com/pborman/uuid";
fetch = {
type = "git";
url = "https://github.com/pborman/uuid";
rev = "1b00554d822231195d1babd97ff4a781231955c9";
sha256 = "0rjkcf85sagdwzsycj1bbjyx5bgmrc1i8l5qf1f44z24rhbbkaan";
};
}
{
goPackagePath = "github.com/pelletier/go-buffruneio";
fetch = {
type = "git";
url = "https://github.com/pelletier/go-buffruneio";
rev = "c37440a7cf42ac63b919c752ca73a85067e05992";
sha256 = "0l83p1gg6g5mmhmxjisrhfimhbm71lwn1r2w7d6siwwqm9q08sd2";
};
}
{
goPackagePath = "github.com/pelletier/go-toml";
fetch = {
type = "git";
url = "https://github.com/pelletier/go-toml";
rev = "fe206efb84b2bc8e8cfafe6b4c1826622be969e3";
sha256 = "1dlabfpnlzvwf4i86idy8ilqpjsl8yqfgdv0nv5cccm8gkcans5w";
};
}
{
goPackagePath = "github.com/peterh/liner";
fetch = {
type = "git";
url = "https://github.com/peterh/liner";
rev = "88609521dc4b6c858fd4c98b628147da928ce4ac";
sha256 = "0jacb2fqgiccb98v1875j5xvj01l1z2laga1kgr8lhd0nl22r96k";
};
}
{
goPackagePath = "github.com/pkg/errors";
fetch = {
type = "git";
url = "https://github.com/pkg/errors";
rev = "e881fd58d78e04cf6d0de1217f8707c8cc2249bc";
sha256 = "0vfhj598jp6dzy4pbyjdrqxzb5kppw8ggvfh78g80nz11r34xnzs";
};
}
{
goPackagePath = "github.com/pmezard/go-difflib";
fetch = {
type = "git";
url = "https://github.com/pmezard/go-difflib";
rev = "d8ed2627bdf02c080bf22230dbb337003b7aba2d";
sha256 = "0w1jp4k4zbnrxh3jvh8fgbjgqpf2hg31pbj8fb32kh26px9ldpbs";
};
}
{
goPackagePath = "github.com/russross/blackfriday";
fetch = {
type = "git";
url = "https://github.com/russross/blackfriday";
rev = "b253417e1cb644d645a0a3bb1fa5034c8030127c";
sha256 = "1knj8vabymhmkg12cj3hnpqf3b74wwrvqib12yczcvpi52xaqi20";
};
}
{
goPackagePath = "github.com/shurcooL/sanitized_anchor_name";
fetch = {
type = "git";
url = "https://github.com/shurcooL/sanitized_anchor_name";
rev = "79c90efaf01eddc01945af5bc1797859189b830b";
sha256 = "1dj8v91gv1ssw2j88gjzr1hw0n63qqxykjzfbvspyi529xn3ji3y";
};
}
{
goPackagePath = "github.com/spf13/afero";
fetch = {
type = "git";
url = "https://github.com/spf13/afero";
rev = "9be650865eab0c12963d8753212f4f9c66cdcf12";
sha256 = "12dhh6d07304lsjv7c4p95hkip0hnshqhwivdw39pbypgg0p8y34";
};
}
{
goPackagePath = "github.com/spf13/cast";
fetch = {
type = "git";
url = "https://github.com/spf13/cast";
rev = "acbeb36b902d72a7a4c18e8f3241075e7ab763e4";
sha256 = "0w25s6gjbbwv47b9208hysyqqphd6pib3d2phg24mjy4wigkm050";
};
}
{
goPackagePath = "github.com/spf13/cobra";
fetch = {
type = "git";
url = "https://github.com/spf13/cobra";
rev = "7b1b6e8dc027253d45fc029bc269d1c019f83a34";
sha256 = "1nhnlpmbqq1ggix7jaxmzr8awk1zrrzag4vzq1p5q5l25d6kih35";
};
}
{
goPackagePath = "github.com/spf13/jwalterweatherman";
fetch = {
type = "git";
url = "https://github.com/spf13/jwalterweatherman";
rev = "fa7ca7e836cf3a8bb4ebf799f472c12d7e903d66";
sha256 = "0404b7bzx7cq1b2bgdb3gs7gjzm4vvg1hl2y9mcm4m6vz56vbcz8";
};
}
{
goPackagePath = "github.com/spf13/pflag";
fetch = {
type = "git";
url = "https://github.com/spf13/pflag";
rev = "f1d95a35e132e8a1868023a08932b14f0b8b8fcb";
sha256 = "0fwvkyq36jvy2gid81031ll7qaj8jxr5g36fff7hhkp3hh4kz6zh";
};
}
{
goPackagePath = "github.com/spf13/viper";
fetch = {
type = "git";
url = "https://github.com/spf13/viper";
rev = "0967fc9aceab2ce9da34061253ac10fb99bba5b2";
sha256 = "016syis0rvccp2indjqi1vnz3wk7c9dhkvkgam0j79sb019kl80f";
};
}
{
goPackagePath = "github.com/stretchr/testify";
fetch = {
type = "git";
url = "https://github.com/stretchr/testify";
rev = "87b1dfb5b2fa649f52695dd9eae19abe404a4308";
sha256 = "1iyfxs3nxdn1fyfqv3gggxcxab66a3m6cmjkhqhcapxm3qvgbrlc";
};
}
{
goPackagePath = "github.com/syndtr/goleveldb";
fetch = {
type = "git";
url = "https://github.com/syndtr/goleveldb";
rev = "b89cc31ef7977104127d34c1bd31ebd1a9db2199";
sha256 = "0pbmssaw7fsgspv0jr3hsd1208qqxcvy4faks9hypqgl5gwday4p";
};
}
{
goPackagePath = "github.com/tylertreat/BoomFilters";
fetch = {
type = "git";
url = "https://github.com/tylertreat/BoomFilters";
rev = "37e169ae37ed529d93ecacb509c0dc80078478fc";
sha256 = "15wwdsxxvkgxbxv3v0ywnwjwndpmps49n3a49z7bzjl7r2nsm7qv";
};
}
{
goPackagePath = "golang.org/x/net";
fetch = {
type = "git";
url = "https://go.googlesource.com/net";
rev = "da118f7b8e5954f39d0d2130ab35d4bf0e3cb344";
sha256 = "09xpndqc6a2r0lw42cyl1pkhfddl01sd9c3qqjjwp3vmxm004whv";
};
}
{
goPackagePath = "golang.org/x/sys";
fetch = {
type = "git";
url = "https://go.googlesource.com/sys";
rev = "9ccfe848b9db8435a24c424abbc07a921adf1df5";
sha256 = "0wn3p7nrf9lx5svnya5mxy5b8cxqs2rp8lxc477szna313m1jhs4";
};
}
{
goPackagePath = "golang.org/x/text";
fetch = {
type = "git";
url = "https://go.googlesource.com/text";
rev = "470f45bf29f4147d6fbd7dfd0a02a848e49f5bf4";
sha256 = "1yzh1qxwd0xkh0k04hwp7yii21i26b4ngxvm1g98qlji1g2wbjbc";
};
}
{
goPackagePath = "google.golang.org/appengine";
fetch = {
type = "git";
url = "https://github.com/golang/appengine";
rev = "170382fa85b10b94728989dfcf6cc818b335c952";
sha256 = "0dqx24qc7h53p16xnkwn2jpk3wjjlvv48akqk74vx31pr2nn0g56";
};
}
{
goPackagePath = "gopkg.in/mgo.v2";
fetch = {
type = "git";
url = "https://github.com/go-mgo/mgo";
rev = "3f83fa5005286a7fe593b055f0d7771a7dce4655";
sha256 = "19vwb6qlcyh3nh6pkk0bynwmr5cmi6mm4hdz01lwb4ybnkzxryc7";
};
}
{
goPackagePath = "gopkg.in/olivere/elastic.v5";
fetch = {
type = "git";
url = "https://github.com/olivere/elastic";
rev = "79ff368708b3a2a9da641dc831d95fd0782bf4ef";
sha256 = "1lq8nhjnkf246nl5h40ldh1qz2yx73yaqfmsh9ddvkwn4173c7jj";
};
}
{
goPackagePath = "gopkg.in/yaml.v2";
fetch = {
type = "git";
url = "https://github.com/go-yaml/yaml";
rev = "cd8b52f8269e0feb286dfeef29f8fe4d5b397e0b";
sha256 = "1hj2ag9knxflpjibck0n90jrhsrqz7qvad4qnif7jddyapi9bqzl";
};
}
]

View file

@ -0,0 +1,75 @@
{ lib, stdenv, fetchFromGitHub, cmake, libtool, llvm-bintools, ninja
, boost, brotli, capnproto, cctz, clang-unwrapped, double-conversion
, icu, jemalloc, libcpuid, libxml2, lld, llvm, lz4, libmysqlclient, openssl, perl
, poco, protobuf, python3, rapidjson, re2, rdkafka, readline, sparsehash, unixODBC
, xxHash, zstd
, nixosTests
}:

stdenv.mkDerivation rec {
  pname = "clickhouse";
  version = "22.3.2.2";

  src = fetchFromGitHub {
    owner = "ClickHouse";
    repo = "ClickHouse";
    rev = "v${version}-lts";
    fetchSubmodules = true;
    sha256 = "0rhzgm0gvwpx4h5xyr7y393y7s9slcr4a7grw9316f5m70frxg2v";
  };

  nativeBuildInputs = [ cmake libtool llvm-bintools ninja ];
  buildInputs = [
    boost brotli capnproto cctz clang-unwrapped double-conversion
    icu jemalloc libxml2 lld llvm lz4 libmysqlclient openssl perl
    poco protobuf python3 rapidjson re2 rdkafka readline sparsehash unixODBC
    xxHash zstd
  ] ++ lib.optional stdenv.hostPlatform.isx86 libcpuid;

  # Several helper scripts call `git rev-parse --show-toplevel`, which
  # cannot work in the sandbox; point them at $src instead.
  postPatch = ''
    patchShebangs src/

    substituteInPlace src/Storages/System/StorageSystemLicenses.sh \
      --replace 'git rev-parse --show-toplevel' '$src'
    substituteInPlace utils/check-style/check-duplicate-includes.sh \
      --replace 'git rev-parse --show-toplevel' '$src'
    substituteInPlace utils/check-style/check-ungrouped-includes.sh \
      --replace 'git rev-parse --show-toplevel' '$src'
    substituteInPlace utils/list-licenses/list-licenses.sh \
      --replace 'git rev-parse --show-toplevel' '$src'
    substituteInPlace utils/check-style/check-style \
      --replace 'git rev-parse --show-toplevel' '$src'
  '';

  cmakeFlags = [
    "-DENABLE_TESTS=OFF"
    "-DENABLE_EMBEDDED_COMPILER=ON"
    # This flag was previously spelled "-USE_INTERNAL_LLVM_LIBRARY=OFF",
    # which CMake parses as `-U` (remove cache entries) with the pattern
    # "SE_INTERNAL_LLVM_LIBRARY=OFF" — a no-op — rather than disabling the
    # option as intended.
    "-DUSE_INTERNAL_LLVM_LIBRARY=OFF"
  ];

  # Strip the test suite and the impure /var/log logging defaults from the
  # installed server configuration; log to the console instead.
  postInstall = ''
    rm -rf $out/share/clickhouse-test

    sed -i -e '\!<log>/var/log/clickhouse-server/clickhouse-server\.log</log>!d' \
      $out/etc/clickhouse-server/config.xml
    substituteInPlace $out/etc/clickhouse-server/config.xml \
      --replace "<errorlog>/var/log/clickhouse-server/clickhouse-server.err.log</errorlog>" "<console>1</console>"
  '';

  hardeningDisable = [ "format" ];

  # Builds in 7+h with 2 cores, and ~20m with a big-parallel builder.
  requiredSystemFeatures = [ "big-parallel" ];

  passthru.tests.clickhouse = nixosTests.clickhouse;

  meta = with lib; {
    homepage = "https://clickhouse.tech/";
    description = "Column-oriented database management system";
    license = licenses.asl20;
    maintainers = with maintainers; [ orivej ];
    platforms = platforms.linux;
    # Moved into `meta`: only `meta.broken` gates evaluation — at the
    # derivation top level (where it previously sat) this attribute had no
    # effect.
    # not supposed to work on 32-bit https://github.com/ClickHouse/ClickHouse/pull/23959#issuecomment-835343685
    broken = stdenv.buildPlatform.is32bit;
  };
}

View file

@ -0,0 +1,257 @@
{ lib, stdenv, fetchFromGitHub, buildGoModule, makeWrapper, runCommand
, cacert, moreutils, jq, git, rsync, pkg-config, yarn, python3
, esbuild, nodejs-14_x, node-gyp, libsecret, xorg, ripgrep
, AppKit, Cocoa, CoreServices, Security, cctools, xcbuild }:
let
system = stdenv.hostPlatform.system;
nodejs = nodejs-14_x;
python = python3;
yarn' = yarn.override { inherit nodejs; };
defaultYarnOpts = [ "frozen-lockfile" "non-interactive" "no-progress"];
# replaces esbuild's download script with a binary from nixpkgs
patchEsbuild = path : version : ''
mkdir -p ${path}/node_modules/esbuild/bin
jq "del(.scripts.postinstall)" ${path}/node_modules/esbuild/package.json | sponge ${path}/node_modules/esbuild/package.json
sed -i 's/${version}/${esbuild.version}/g' ${path}/node_modules/esbuild/lib/main.js
ln -s -f ${esbuild}/bin/esbuild ${path}/node_modules/esbuild/bin/esbuild
'';
in stdenv.mkDerivation rec {
pname = "code-server";
version = "4.0.1";
commit = "7fe23daf009e5234eaa54a1ea5ff26df384c47ac";
src = fetchFromGitHub {
owner = "cdr";
repo = "code-server";
rev = "v${version}";
sha256 = "1s3dcmzlkyh7qfs3ai1p7dlp45iys0ax1fbxxz17p395pw9anrrl";
};
cloudAgent = buildGoModule rec {
pname = "cloud-agent";
version = "0.2.3";
src = fetchFromGitHub {
owner = "cdr";
repo = "cloud-agent";
rev = "v${version}";
sha256 = "14i1qq273f0yn5v52ryiqwj7izkd1yd212di4gh4bqypmmzhw3jj";
};
vendorSha256 = "0k9v10wkzx53r5syf6bmm81gr4s5dalyaa07y9zvx6vv5r2h0661";
postPatch = ''
# the cloud-agent release tag has an empty version string, so add it back in
substituteInPlace internal/version/version.go \
--replace 'var Version string' 'var Version string = "v${version}"'
'';
};
yarnCache = stdenv.mkDerivation {
name = "${pname}-${version}-${system}-yarn-cache";
inherit src;
nativeBuildInputs = [ yarn' git cacert ];
buildPhase = ''
export HOME=$PWD
export GIT_SSL_CAINFO="${cacert}/etc/ssl/certs/ca-bundle.crt"
yarn --cwd "./vendor" install --modules-folder modules --ignore-scripts --frozen-lockfile
yarn config set yarn-offline-mirror $out
find "$PWD" -name "yarn.lock" -printf "%h\n" | \
xargs -I {} yarn --cwd {} \
--frozen-lockfile --ignore-scripts --ignore-platform \
--ignore-engines --no-progress --non-interactive
'';
outputHashMode = "recursive";
outputHashAlgo = "sha256";
# to get hash values use nix-build -A code-server.prefetchYarnCache
outputHash = "0qmfsirld1qfl2s26rxbpmvxsyj2pvzkgk8w89zlrgbhgc5fj8p9";
};
nativeBuildInputs = [
nodejs yarn' python pkg-config makeWrapper git rsync jq moreutils
];
buildInputs = lib.optionals (!stdenv.isDarwin) [ libsecret ]
++ (with xorg; [ libX11 libxkbfile ])
++ lib.optionals stdenv.isDarwin [
AppKit Cocoa CoreServices Security cctools xcbuild
];
patches = [
# remove download of coder-cloud agent
./remove-cloud-agent-download.patch
];
postPatch = ''
export HOME=$PWD
patchShebangs ./ci
# inject git commit
substituteInPlace ci/build/build-release.sh \
--replace '$(git rev-parse HEAD)' "$commit"
'';
configurePhase = ''
# run yarn offline by default
echo '--install.offline true' >> .yarnrc
# set default yarn opts
${lib.concatMapStrings (option: ''
yarn --offline config set ${option}
'') defaultYarnOpts}
# set offline mirror to yarn cache we created in previous steps
yarn --offline config set yarn-offline-mirror "${yarnCache}"
# link coder-cloud agent from nix store
mkdir -p lib
ln -s "${cloudAgent}/bin/cloud-agent" ./lib/coder-cloud-agent
# skip unnecessary electron download
export ELECTRON_SKIP_BINARY_DOWNLOAD=1
# set nodedir to prevent node-gyp from downloading headers
# taken from https://nixos.org/manual/nixpkgs/stable/#javascript-tool-specific
mkdir -p $HOME/.node-gyp/${nodejs.version}
echo 9 > $HOME/.node-gyp/${nodejs.version}/installVersion
ln -sfv ${nodejs}/include $HOME/.node-gyp/${nodejs.version}
export npm_config_nodedir=${nodejs}
# use updated node-gyp. fixes the following error on Darwin:
# PermissionError: [Errno 1] Operation not permitted: '/usr/sbin/pkgutil'
export npm_config_node_gyp=${node-gyp}/lib/node_modules/node-gyp/bin/node-gyp.js
'';
buildPhase = ''
# install code-server dependencies
yarn --offline --ignore-scripts
# patch shebangs of everything to allow binary packages to build
patchShebangs .
# Skip shellcheck download
jq "del(.scripts.preinstall)" node_modules/shellcheck/package.json | sponge node_modules/shellcheck/package.json
# rebuild binary packages now that scripts have been patched
npm rebuild
# Replicate ci/dev/postinstall.sh
echo "----- Replicate ci/dev/postinstall.sh"
yarn --cwd "./vendor" install --modules-folder modules --offline --ignore-scripts --frozen-lockfile
# Replicate vendor/postinstall.sh
echo " ----- Replicate vendor/postinstall.sh"
yarn --cwd "./vendor/modules/code-oss-dev" --offline --frozen-lockfile --ignore-scripts install
# remove all built-in extensions, as these are 3rd party extensions that
# get downloaded from vscode marketplace
jq --slurp '.[0] * .[1]' "vendor/modules/code-oss-dev/product.json" <(
cat << EOF
{
"builtInExtensions": []
}
EOF
) | sponge vendor/modules/code-oss-dev/product.json
# disable automatic updates
sed -i '/update.mode/,/\}/{s/default:.*/default: "none",/g}' \
vendor/modules/code-oss-dev/src/vs/platform/update/common/update.config.contribution.ts
# put ripgrep binary into bin, so postinstall does not try to download it
find -name vscode-ripgrep -type d \
-execdir mkdir -p {}/bin \; \
-execdir ln -s ${ripgrep}/bin/rg {}/bin/rg \;
# Playwright is only needed for tests, we can disable it for builds.
# There's an environment variable to disable downloads, but the package makes a breaking call to
# sw_vers before that variable is checked.
patch -p1 -i ${./playwright.patch}
# Patch out remote download of nodejs from build script
patch -p1 -i ${./remove-node-download.patch}
# Replicate install vscode dependencies without running script for all vscode packages
# that require patching for postinstall scripts to succeed
find ./vendor/modules/code-oss-dev -path "*node_modules" -prune -o \
-path "./*/*/*/*/*" -name "yarn.lock" -printf "%h\n" | \
xargs -I {} yarn --cwd {} \
--frozen-lockfile --offline --ignore-scripts --ignore-engines
# patch shebangs of everything to allow binary packages to build
patchShebangs .
${patchEsbuild "./vendor/modules/code-oss-dev/build" "0.12.6"}
${patchEsbuild "./vendor/modules/code-oss-dev/extensions" "0.11.23"}
'' + lib.optionalString stdenv.isDarwin ''
# use prebuilt binary for @parcel/watcher, which requires macOS SDK 10.13+
# (see issue #101229)
pushd ./vendor/modules/code-oss-dev/remote/node_modules/@parcel/watcher
mkdir -p ./build/Release
mv ./prebuilds/darwin-x64/node.napi.glibc.node ./build/Release/watcher.node
jq "del(.scripts) | .gypfile = false" ./package.json | sponge ./package.json
popd
'' + ''
# rebuild binaries, we use npm here, as yarn does not provide an alternative
# that would not attempt to try to reinstall everything and break our
# patching attempts
npm rebuild --prefix vendor/modules/code-oss-dev --update-binary
# run postinstall scripts after patching
find ./vendor/modules/code-oss-dev -path "*node_modules" -prune -o \
-path "./*/*/*/*/*" -name "yarn.lock" -printf "%h\n" | \
xargs -I {} sh -c 'jq -e ".scripts.postinstall" {}/package.json >/dev/null && yarn --cwd {} postinstall --frozen-lockfile --offline || true'
# build code-server
yarn build
# build vscode
yarn build:vscode
# create release
yarn release
'';
installPhase = ''
mkdir -p $out/libexec/code-server $out/bin
# copy release to libexec path
cp -R -T release "$out/libexec/code-server"
# install only production dependencies
yarn --offline --cwd "$out/libexec/code-server" --production
# link coder-cloud agent from nix store
mkdir -p $out/libexec/code-server/lib
ln -s "${cloudAgent}/bin/cloud-agent" $out/libexec/code-server/lib/coder-cloud-agent
# create wrapper
makeWrapper "${nodejs-14_x}/bin/node" "$out/bin/code-server" \
--add-flags "$out/libexec/code-server/out/node/entry.js"
'';
passthru = {
prefetchYarnCache = lib.overrideDerivation yarnCache (d: {
outputHash = lib.fakeSha256;
});
};
meta = with lib; {
description = "Run VS Code on a remote server";
longDescription = ''
code-server is VS Code running on a remote server, accessible through the
browser.
'';
homepage = "https://github.com/cdr/code-server";
license = licenses.mit;
maintainers = with maintainers; [ offline ];
platforms = [ "x86_64-linux" "aarch64-linux" "x86_64-darwin" ];
};
}

View file

@ -0,0 +1,10 @@
--- ./vendor/modules/code-oss-dev/node_modules/playwright/install.js
+++ ./vendor/modules/code-oss-dev/node_modules/playwright/install.js
@@ -14,6 +14,4 @@
* limitations under the License.
*/
-const { installDefaultBrowsersForNpmInstall } = require('playwright-core/lib/utils/registry');
-
-installDefaultBrowsersForNpmInstall();
+process.stdout.write('Browser install disabled by Nix build script\n');

View file

@ -0,0 +1,17 @@
--- ./ci/build/npm-postinstall.sh
+++ ./ci/build/npm-postinstall.sh
@@ -58,14 +58,6 @@
OS="$(uname | tr '[:upper:]' '[:lower:]')"
- mkdir -p ./lib
-
- if curl -fsSL "https://github.com/cdr/cloud-agent/releases/latest/download/cloud-agent-$OS-$ARCH" -o ./lib/coder-cloud-agent; then
- chmod +x ./lib/coder-cloud-agent
- else
- echo "Failed to download cloud agent; --link will not work"
- fi
-
if ! vscode_yarn; then
echo "You may not have the required dependencies to build the native modules."
echo "Please see https://github.com/cdr/code-server/blob/master/docs/npm.md"

View file

@ -0,0 +1,27 @@
--- ./vendor/modules/code-oss-dev/build/gulpfile.reh.js
+++ ./vendor/modules/code-oss-dev/build/gulpfile.reh.js
@@ -277,8 +277,6 @@
.pipe(util.stripSourceMappingURL())
.pipe(jsFilter.restore);
- const nodePath = `.build/node/v${nodeVersion}/${platform}-${platform === 'darwin' ? 'x64' : arch}`;
- const node = gulp.src(`${nodePath}/**`, { base: nodePath, dot: true });
let web = [];
if (type === 'reh-web') {
@@ -296,7 +294,6 @@
license,
sources,
deps,
- node,
...web
);
@@ -376,7 +373,6 @@
const destinationFolderName = `vscode-${type}${dashed(platform)}${dashed(arch)}`;
const serverTaskCI = task.define(`vscode-${type}${dashed(platform)}${dashed(arch)}${dashed(minified)}-ci`, task.series(
- gulp.task(`node-${platform}-${platform === 'darwin' ? 'x64' : arch}`),
util.rimraf(path.join(BUILD_ROOT, destinationFolderName)),
packageTask(type, platform, arch, sourceFolderName, destinationFolderName)
));

View file

@ -0,0 +1,31 @@
{ lib, stdenv, fetchFromGitHub, slurm } :

# SPANK plugin for Slurm that sets up SSH tunnels / port forwarding for jobs.
stdenv.mkDerivation rec {
  pname = "slurm-spank-stunnel";
  version = "0.2.2";

  src = fetchFromGitHub {
    owner = "stanford-rc";
    repo = "slurm-spank-stunnel";
    rev = version;
    sha256 = "15cpd49ccvzsmmr3gk8svm2nz461rvs4ybczckyf4yla0xzp06gj";
  };

  # Use $CC instead of a hard-coded gcc so the host-platform compiler is
  # picked up (stdenv exports CC; this also makes cross-compilation work).
  buildPhase = ''
    $CC -I${slurm.dev}/include -shared -fPIC -o stunnel.so slurm-spank-stunnel.c
  '';

  installPhase = ''
    mkdir -p $out/lib $out/etc/slurm/plugstack.conf.d
    install -m 755 stunnel.so $out/lib
    # Ship the upstream plugstack snippet as an example only; the real
    # configuration lives in /etc/slurm.
    install -m 644 plugstack.conf $out/etc/slurm/plugstack.conf.d/stunnel.conf.example
  '';

  meta = with lib; {
    homepage = "https://github.com/stanford-rc/slurm-spank-stunnel";
    description = "Plugin for SLURM for SSH tunneling and port forwarding support";
    platforms = platforms.linux;
    license = licenses.gpl3Only;
    maintainers = with maintainers; [ markuskowa ];
  };
}

View file

@ -0,0 +1,36 @@
{ lib, stdenv, fetchFromGitHub, slurm } :

# SPANK plugin enabling X11 forwarding for interactive Slurm sessions.
stdenv.mkDerivation rec {
  pname = "slurm-spank-x11";
  version = "0.2.5";

  src = fetchFromGitHub {
    owner = "hautreux";
    repo = "slurm-spank-x11";
    rev = version;
    sha256 = "1dmsr7whxcxwnlvl1x4s3bqr5cr6q5ssb28vqi67w5hj4sshisry";
  };

  # Use $CC instead of a hard-coded gcc so the host-platform compiler is
  # picked up (stdenv exports CC; this also makes cross-compilation work).
  # X11_LIBEXEC_PROG bakes the store path of the helper binary into both
  # the helper itself and the SPANK plugin.
  buildPhase = ''
    $CC -DX11_LIBEXEC_PROG="\"$out/bin/slurm-spank-x11\"" \
        -g -o slurm-spank-x11 slurm-spank-x11.c
    $CC -I${slurm.dev}/include -DX11_LIBEXEC_PROG="\"$out/bin/slurm-spank-x11\"" -shared -fPIC \
        -g -o x11.so slurm-spank-x11-plug.c
  '';

  installPhase = ''
    mkdir -p $out/bin $out/lib
    install -m 755 slurm-spank-x11 $out/bin
    install -m 755 x11.so $out/lib
  '';

  meta = with lib; {
    homepage = "https://github.com/hautreux/slurm-spank-x11";
    description = "Plugin for SLURM to allow for interactive X11 sessions";
    platforms = platforms.linux;
    license = licenses.gpl3Only;
    maintainers = with maintainers; [ markuskowa ];
  };
}

View file

@ -0,0 +1,13 @@
diff --git a/src/common/env.c b/src/common/env.c
index 987846d..73d3b3b 100644
--- a/src/common/env.c
+++ b/src/common/env.c
@@ -1941,7 +1941,7 @@ char **env_array_user_default(const char *username, int timeout, int mode,
char **env = NULL;
char *starttoken = "XXXXSLURMSTARTPARSINGHEREXXXX";
char *stoptoken = "XXXXSLURMSTOPPARSINGHEREXXXXX";
- char cmdstr[256], *env_loc = NULL;
+ char cmdstr[MAXPATHLEN], *env_loc = NULL;
char *stepd_path = NULL;
int fd1, fd2, fildes[2], found, fval, len, rc, timeleft;
int buf_read, buf_rem, config_timeout;

View file

@ -0,0 +1,94 @@
{ lib, stdenv, fetchFromGitHub, pkg-config, libtool, curl
, python3, munge, perl, pam, shadow, coreutils, dbus, libbpf
, ncurses, libmysqlclient, gtk2, lua, hwloc, numactl
, readline, freeipmi, xorg, lz4, rdma-core, nixosTests
, pmix
, libjwt
, libyaml
, json_c
# enable internal X11 support via libssh2
, enableX11 ? true
}:

# Slurm workload manager, built from the upstream GitHub release tag.
stdenv.mkDerivation rec {
pname = "slurm";
version = "22.05.0.1";
# N.B. We use github release tags instead of https://www.schedmd.com/downloads.php
# because the latter does not keep older releases.
src = fetchFromGitHub {
owner = "SchedMD";
repo = "slurm";
# The release tags use - instead of .
rev = "${pname}-${builtins.replaceStrings ["."] ["-"] version}";
sha256 = "0bc8kycrc5a8kqffbd03k22z38f7z8fj725iniq8hz6srhf5nxgs";
};
# Split headers into a separate "dev" output.
outputs = [ "out" "dev" ];
patches = [
# increase string length to allow for full
# path of 'echo' in nix store
./common-env-echo.patch
# Required for configure to pick up the right dlopen path
./pmix-configure.patch
];
# Replace hard-coded host paths with Nix store paths before patching/building.
prePatch = ''
substituteInPlace src/common/env.c \
--replace "/bin/echo" "${coreutils}/bin/echo"
'' + (lib.optionalString enableX11 ''
substituteInPlace src/common/x11_util.c \
--replace '"/usr/bin/xauth"' '"${xorg.xauth}/bin/xauth"'
'');
# nixos test fails to start slurmd with 'undefined symbol: slurm_job_preempt_mode'
# https://groups.google.com/forum/#!topic/slurm-devel/QHOajQ84_Es
# this doesn't fix tests completely at least makes slurmd to launch
hardeningDisable = [ "bindnow" ];
nativeBuildInputs = [ pkg-config libtool python3 ];
buildInputs = [
curl python3 munge perl pam
libmysqlclient ncurses gtk2 lz4 rdma-core
lua hwloc numactl readline freeipmi shadow.su
pmix json_c libjwt libyaml dbus libbpf
] ++ lib.optionals enableX11 [ xorg.xauth ];
configureFlags = with lib;
[ "--with-freeipmi=${freeipmi}"
"--with-hwloc=${hwloc.dev}"
"--with-json=${json_c.dev}"
"--with-jwt=${libjwt}"
"--with-lz4=${lz4.dev}"
"--with-munge=${munge}"
"--with-yaml=${libyaml}"
"--with-ofed=${rdma-core}"
"--sysconfdir=/etc/slurm"
"--with-pmix=${pmix}"
"--with-bpf=${libbpf}"
# NOTE(review): gtk2 is a required argument above, so this guard is
# presumably never taken unless gtk2 is explicitly overridden to null.
] ++ (optional (gtk2 == null) "--disable-gtktest")
++ (optional (!enableX11) "--disable-x11");
# The doc generators ship with /usr/bin/env shebangs that need patching.
preConfigure = ''
patchShebangs ./doc/html/shtml2html.py
patchShebangs ./doc/man/man2html.py
'';
# Drop libtool archives; they are not needed at runtime.
postInstall = ''
rm -f $out/lib/*.la $out/lib/slurm/*.la
'';
enableParallelBuilding = true;
passthru.tests.slurm = nixosTests.slurm;
meta = with lib; {
homepage = "http://www.schedmd.com/";
description = "Simple Linux Utility for Resource Management";
platforms = platforms.linux;
license = licenses.gpl2Only;
maintainers = with maintainers; [ jagajaga markuskowa ];
};
}

View file

@ -0,0 +1,13 @@
diff --git a/configure b/configure
index 1cf53bc..ab68441 100755
--- a/configure
+++ b/configure
@@ -21207,7 +21207,7 @@ rm -f conftest.err conftest.i conftest.$ac_ext
as_fn_error $? "error processing $x_ac_cv_pmix_libdir: PMIx v3.x was already found in one of the previous paths" "$LINENO" 5
fi
_x_ac_pmix_v3_found="1"
- PMIX_V3_CPPFLAGS="-I$x_ac_cv_pmix_dir/include"
+ PMIX_V3_CPPFLAGS="-I$x_ac_cv_pmix_dir/include -DPMIXP_V3_LIBPATH=\\\"$x_ac_cv_pmix_libdir\\\""
if test "$ac_with_rpath" = "yes"; then
PMIX_V3_LDFLAGS="-Wl,-rpath -Wl,$x_ac_cv_pmix_libdir -L$x_ac_cv_pmix_libdir"
else

View file

@ -0,0 +1,70 @@
{ stdenv, lib, fetchurl, zip, unzip
, jdk, python2
, confFile ? ""
, extraLibraryPaths ? []
, extraJars ? []
}:

# Apache Storm distributed realtime computation system, repacked from the
# upstream binary release tarball.
stdenv.mkDerivation rec {
  pname = "apache-storm";
  version = "2.4.0";
  # `name` is referenced explicitly because the upstream tarball and its
  # directory layout use the full "apache-storm-<version>" string.
  name = "${pname}-${version}";

  src = fetchurl {
    url = "mirror://apache/storm/${name}/${name}.tar.gz";
    sha256 = "sha256-VFNcaISPBRMGR5l/P6/pGnK7lHClDW2AmXJ00gzxwMY=";
  };

  nativeBuildInputs = [ zip unzip ];

  installPhase = ''
    mkdir -p $out/share/${name}
    mv public $out/docs
    mv examples $out/share/${name}/.
    mv external extlib* lib $out/.
    mv conf bin $out/.
    mv log4j2 $out/conf/.
  '';

  fixupPhase = ''
    # Fix python reference
    sed -i \
      -e '19iPYTHON=${python2}/bin/python' \
      -e 's|#!/usr/bin/.*python|#!${python2}/bin/python|' \
      $out/bin/storm
    sed -i \
      -e 's|#!/usr/bin/.*python|#!${python2}/bin/python|' \
      -e "s|STORM_CONF_DIR = .*|STORM_CONF_DIR = os.getenv('STORM_CONF_DIR','$out/conf')|" \
      -e 's|STORM_LOG4J2_CONF_DIR =.*|STORM_LOG4J2_CONF_DIR = os.path.join(STORM_CONF_DIR, "log4j2")|' \
      $out/bin/storm.py
    # Default jdk location
    sed -i -e 's|#.*export JAVA_HOME=.*|export JAVA_HOME="${jdk.home}"|' \
      $out/conf/storm-env.sh
    ls -lh $out/lib
    # defaults.yaml is shipped inside the client jar; extract it, edit it,
    # and remove it from the jar so the copy in $out/conf wins.
    unzip $out/lib/storm-client-${version}.jar defaults.yaml;
    zip -d $out/lib/storm-client-${version}.jar defaults.yaml;
    sed -i \
      -e 's|java.library.path: .*|java.library.path: "${lib.concatStringsSep ":" extraLibraryPaths}"|' \
      -e 's|storm.log4j2.conf.dir: .*|storm.log4j2.conf.dir: "conf/log4j2"|' \
      defaults.yaml
    ${lib.optionalString (confFile != "") "cat ${confFile} >> defaults.yaml"}
    mv defaults.yaml $out/conf;
    # Link to extra jars
    cd $out/lib;
    ${lib.concatMapStrings (jar: "ln -s ${jar};\n") extraJars}
  '';

  # Do not strip the prebuilt jars/binaries.
  dontStrip = true;

  meta = with lib; {
    homepage = "https://storm.apache.org/";
    description = "Distributed realtime computation system";
    sourceProvenance = with sourceTypes; [ binaryBytecode ];
    license = licenses.asl20;
    maintainers = with maintainers; [ edwtjo vizanto ];
    platforms = with platforms; unix;
  };
}

View file

@ -0,0 +1,73 @@
{ lib, stdenv, fetchFromGitHub, openssl, flex, bison, pkg-config, groff, libxml2, util-linux
, coreutils, file, libtool, which, boost, autoreconfHook
}:

# TORQUE resource manager, built from a pinned commit of the 6.1.3h2 branch.
stdenv.mkDerivation rec {
pname = "torque";
version = "6.1.3h2";
src = fetchFromGitHub {
owner = "adaptivecomputing";
repo = pname;
# branch 6.1.3h2, as they aren't pushing tags
# https://github.com/adaptivecomputing/torque/issues/467
rev = "458883319157cfc5c509046d09f9eb8e68e8d398";
sha256 = "1b56bc5j9wg87kcywzmhf7234byyrwax9v1pqsr9xmv2x7saakrr";
};
strictDeps = true;
nativeBuildInputs = [ autoreconfHook pkg-config flex bison libxml2 ];
buildInputs = [
openssl groff libxml2 util-linux libtool
which boost
];
enableParallelBuilding = true;
# added to fix build with gcc7
NIX_CFLAGS_COMPILE = "-Wno-error -fpermissive";
# Remove init-script installation and redirect hard-coded host paths
# (/etc, /bin/cp) into the Nix store / $out.
postPatch = ''
substituteInPlace Makefile.am \
--replace "contrib/init.d contrib/systemd" ""
substituteInPlace src/cmds/Makefile.am \
--replace "/etc/" "$out/etc/"
substituteInPlace src/mom_rcp/pathnames.h \
--replace /bin/cp ${coreutils}/bin/cp
substituteInPlace src/resmom/requests.c \
--replace /bin/cp ${coreutils}/bin/cp
'';
# Fix the configure script's tool paths and library detection, and keep
# install rules from creating setuid files or directories outside $out.
preConfigure = ''
substituteInPlace ./configure \
--replace '/usr/bin/file' '${file}/bin/file'
# fix broken libxml2 detection
sed -i '/xmlLib\=/c\xmlLib=xml2' ./configure
for s in fifo cray_t3e dec_cluster msic_cluster sgi_origin umn_cluster; do
substituteInPlace src/scheduler.cc/samples/$s/Makefile.in \
--replace "schedprivdir = " "schedprivdir = $out/"
done
for f in $(find ./ -name Makefile.in); do
echo patching $f...
sed -i $f -e '/PBS_MKDIRS/d' -e '/chmod u+s/d'
done
patchShebangs buildutils
'';
# Ship the setup helpers so administrators can run them manually.
postInstall = ''
cp -v buildutils/pbs_mkdirs $out/bin/
cp -v torque.setup $out/bin/
chmod +x $out/bin/pbs_mkdirs $out/bin/torque.setup
'';
meta = with lib; {
homepage = "http://www.adaptivecomputing.com/products/open-source/torque";
description = "Resource management system for submitting and controlling jobs on supercomputers, clusters, and grids";
platforms = platforms.linux;
# Custom upstream license, not an SPDX identifier.
license = "TORQUEv1.1";
};
}

View file

@ -0,0 +1,45 @@
{ stdenv, lib, fetchurl
, jre, makeWrapper, bash, gnused }:

# Confluent Platform (Kafka distribution), repacked from the upstream
# binary tarball. The unused fetchFromGitHub argument was dropped.
stdenv.mkDerivation rec {
  pname = "confluent-platform";
  version = "7.1.0";

  src = fetchurl {
    url = "https://packages.confluent.io/archive/${lib.versions.majorMinor version}/confluent-${version}.tar.gz";
    sha256 = "sha256-uoJQQZxUGniMLJk/BwwiNnpgYFcqJ+27GFQLEpsFxCw=";
  };

  nativeBuildInputs = [ makeWrapper ];
  buildInputs = [ jre bash ];

  installPhase = ''
    mkdir -p $out
    cp -R bin etc share src $out
    rm -rf $out/bin/windows
    patchShebangs $out/bin
    # allow us the specify logging directory using env
    substituteInPlace $out/bin/kafka-run-class \
      --replace 'LOG_DIR="$base_dir/logs"' 'LOG_DIR="$KAFKA_LOG_DIR"'
    substituteInPlace $out/bin/ksql-run-class \
      --replace 'LOG_DIR="$base_dir/logs"' 'LOG_DIR="$KAFKA_LOG_DIR"'
    # Wrap every launcher so it finds java and the usual shell tools.
    # (The glob previously read "$out/bin\/*" with a spurious backslash.)
    for p in $out/bin/*; do
      wrapProgram $p \
        --set JAVA_HOME "${jre}" \
        --set KAFKA_LOG_DIR "/tmp/apache-kafka-logs" \
        --prefix PATH : "${jre}/bin:${bash}/bin:${gnused}/bin"
    done
  '';

  meta = with lib; {
    homepage = "https://www.confluent.io/";
    description = "Confluent event streaming platform based on Apache Kafka";
    license = licenses.asl20;
    maintainers = with maintainers; [ zoedsoupe ];
    platforms = platforms.unix;
  };
}

View file

@ -0,0 +1,46 @@
{ lib, buildGoModule, fetchFromGitHub, nixosTests }:

# HashiCorp Consul, built with the Go module builder from a release tag
# (release tags include the pre-built, vendored web UI).
buildGoModule rec {
pname = "consul";
version = "1.12.2";
rev = "v${version}";
# Note: Currently only release tags are supported, because they have the Consul UI
# vendored. See
#   https://github.com/NixOS/nixpkgs/pull/48714#issuecomment-433454834
# If you want to use a non-release commit as `src`, you probably want to improve
# this derivation so that it can build the UI's JavaScript from source.
# See https://github.com/NixOS/nixpkgs/pull/49082 for something like that.
# Or, if you want to patch something that doesn't touch the UI, you may want
# to apply your changes as patches on top of a release commit.
src = fetchFromGitHub {
owner = "hashicorp";
repo = pname;
inherit rev;
sha256 = "sha256-X2vW6fnT5IY+YxGnAP5Flk610vIwLg3Qu/rEerFyNCw=";
};
passthru.tests.consul = nixosTests.consul;
# This corresponds to paths with package main - normally unneeded but consul
# has a split module structure in one repo
subPackages = ["." "connect/certgen"];
vendorSha256 = "sha256-Qcm+uPlvzg0r+a/rYVCUaQ7iIgCpW7MyL7KrHkNm4XQ=";
# NOTE(review): upstream unit tests are skipped here; the NixOS test above
# provides integration coverage instead.
doCheck = false;
# Embed version metadata the same way upstream release builds do.
ldflags = [
"-X github.com/hashicorp/consul/version.GitDescribe=v${version}"
"-X github.com/hashicorp/consul/version.Version=${version}"
"-X github.com/hashicorp/consul/version.VersionPrerelease="
];
meta = with lib; {
description = "Tool for service discovery, monitoring and configuration";
homepage = "https://www.consul.io/";
platforms = platforms.linux ++ platforms.darwin;
license = licenses.mpl20;
maintainers = with maintainers; [ pradeepchhetri vdemeester nh2 techknowlogick];
};
}

View file

@ -0,0 +1,79 @@
{ lib, stdenv, fetchurl, makeWrapper, pkg-config, kronosnet, nss, nspr, libqb
, systemd, dbus, rdma-core, libstatgrab, net-snmp, nixosTests
, enableDbus ? false
, enableInfiniBandRdma ? false
, enableMonitoring ? false
, enableSnmp ? false
}:

with lib;

# Corosync cluster engine.
stdenv.mkDerivation rec {
  pname = "corosync";
  version = "3.1.6";

  src = fetchurl {
    url = "http://build.clusterlabs.org/corosync/releases/${pname}-${version}.tar.gz";
    sha256 = "sha256-ym7TK01/M+1hSvzodg/ljQ3pLGi1ddSWnrrNiS89Hic=";
  };

  nativeBuildInputs = [ makeWrapper pkg-config ];
  buildInputs = [
    kronosnet nss nspr libqb systemd.dev
  ] ++ optional enableDbus dbus
    ++ optional enableInfiniBandRdma rdma-core
    ++ optional enableMonitoring libstatgrab
    ++ optional enableSnmp net-snmp;

  configureFlags = [
    "--sysconfdir=/etc"
    "--localstatedir=/var"
    "--with-logdir=/var/log/corosync"
    "--enable-watchdog"
    "--enable-qdevices"
    # allows Type=notify in the systemd service
    "--enable-systemd"
  ] ++ optional enableDbus "--enable-dbus"
    ++ optional enableInfiniBandRdma "--enable-rdma"
    ++ optional enableMonitoring "--enable-monitoring"
    ++ optional enableSnmp "--enable-snmp";

  # Redirect the writable locations into $out at `make install` time so the
  # install does not touch /etc or /var.
  installFlags = [
    "sysconfdir=$(out)/etc"
    "localstatedir=$(out)/var"
    "COROSYSCONFDIR=$(out)/etc/corosync"
    "INITDDIR=$(out)/etc/init.d"
    "LOGROTATEDIR=$(out)/etc/logrotate.d"
  ];

  enableParallelBuilding = true;

  preConfigure = optionalString enableInfiniBandRdma ''
    # configure looks for the pkg-config files
    # of librdmacm and libibverbs
    # Howver, rmda-core does not provide a pkg-config file
    # We give the flags manually here:
    export rdmacm_LIBS=-lrdmacm
    export rdmacm_CFLAGS=" "
    export ibverbs_LIBS=-libverbs
    export ibverbs_CFLAGS=" "
  '';

  postInstall = ''
    wrapProgram $out/bin/corosync-blackbox \
      --prefix PATH ":" "$out/sbin:${libqb}/sbin"
  '';

  # Fix: nixosTests was referenced here without being declared as a function
  # argument, which aborted evaluation with "undefined variable 'nixosTests'".
  passthru.tests = {
    inherit (nixosTests) pacemaker;
  };

  meta = {
    homepage = "http://corosync.org/";
    description = "A Group Communication System with features for implementing high availability within applications";
    license = licenses.bsd3;
    platforms = platforms.linux;
    maintainers = with maintainers; [ montag451 ryantm ];
  };
}

View file

@ -0,0 +1,55 @@
{ lib
, stdenv
, fetchFromGitHub
, openssl
, libevent
, pkg-config
, libprom
, libpromhttp
, libmicrohttpd
, nixosTests
}:

# coturn TURN/STUN server. The unused fetchpatch argument was dropped.
stdenv.mkDerivation rec {
  pname = "coturn";
  version = "4.5.2";

  src = fetchFromGitHub {
    owner = "coturn";
    repo = "coturn";
    rev = version;
    sha256 = "1s7ncc82ny4bb3qkn3fqr0144xsr7h2y8xmzsf5037h6j8f7j3v8";
  };

  nativeBuildInputs = [ pkg-config ];
  buildInputs = [
    openssl
    libevent
    libprom
    libpromhttp
    libmicrohttpd
  ];

  patches = [
    # Keep configure from probing /var/tmp and /tmp for a temp directory.
    ./pure-configure.patch
  ];

  # Workaround build failure on -fno-common toolchains like upstream
  # gcc-10. Otherwise build fails as:
  #   ld: ...-libprom-0.1.1/include/prom_collector_registry.h:37: multiple definition of
  #   `PROM_COLLECTOR_REGISTRY_DEFAULT'; ...-libprom-0.1.1/include/prom_collector_registry.h:37: first defined here
  # Should be fixed in libprom-1.2.0 and later: https://github.com/digitalocean/prometheus-client-c/pull/25
  NIX_CFLAGS_COMPILE = "-fcommon";

  passthru.tests.coturn = nixosTests.coturn;

  meta = with lib; {
    homepage = "https://coturn.net/";
    license = with licenses; [ bsd3 ];
    description = "A TURN server";
    platforms = platforms.all;
    broken = stdenv.isDarwin; # 2018-10-21
    maintainers = with maintainers; [ ralith _0x4A6F ];
  };
}

View file

@ -0,0 +1,17 @@
diff --git a/configure b/configure
index 28a0625..ea25488 100755
--- a/configure
+++ b/configure
@@ -624,12 +624,6 @@ fi
TMPDIR="."
-if [ -d /var/tmp ] ; then
- TMPDIR="/var/tmp"
-elif [ -d /tmp ] ; then
- TMPDIR=/tmp
-fi
-
${ECHO_CMD} Use TMP dir ${TMPDIR}
#########################

View file

@ -0,0 +1,39 @@
{ lib, stdenv, fetchurl, fetchpatch, pam, libkrb5, cyrus_sasl, miniupnpc, autoreconfHook }:

# Dante circuit-level SOCKS client/server.
stdenv.mkDerivation rec {
  pname = "dante";
  version = "1.4.3";

  src = fetchurl {
    url = "https://www.inet.no/dante/files/${pname}-${version}.tar.gz";
    sha256 = "0pbahkj43rx7rmv2x40mf5p3g3x9d6i2sz7pzglarf54w5ghd2j1";
  };

  # The mips64 patch below touches autoconf input, so configure must be
  # regenerated on that platform.
  nativeBuildInputs = lib.optional stdenv.hostPlatform.isMips64 autoreconfHook;
  buildInputs = [ pam libkrb5 cyrus_sasl miniupnpc ];

  configureFlags = if !stdenv.isDarwin
    then [ "--with-libc=libc.so.6" ]
    else [ "--with-libc=libc${stdenv.targetPlatform.extensions.sharedLibrary}" ];

  dontAddDisableDepTrack = stdenv.isDarwin;

  # Fix: use lib.optionals (plural) — lib.optional applied to a list produced
  # a nested list ([ [ patch ] ]) instead of a flat list of patch files.
  patches = lib.optionals stdenv.hostPlatform.isMips64 [
    (fetchpatch {
      name = "0002-osdep-m4-Remove-getaddrinfo-too-low-checks.patch";
      url = "https://raw.githubusercontent.com/buildroot/buildroot/master/package/dante/0002-osdep-m4-Remove-getaddrinfo-too-low-checks.patch";
      sha256 = "sha256-e+qF8lB5tkiA7RlJ+tX5O6KxQrQp33RSPdP1TxU961Y=";
    })
  ];

  # Drop the hard-coded PATH override in the header-generation script so it
  # uses the sandbox PATH instead.
  postPatch = ''
    substituteInPlace include/redefgen.sh --replace 'PATH=/bin:/usr/bin:/sbin:/usr/sbin' ""
  '';

  meta = with lib; {
    description = "A circuit-level SOCKS client/server that can be used to provide convenient and secure network connectivity";
    homepage = "https://www.inet.no/dante/";
    maintainers = [ maintainers.arobyn ];
    license = licenses.bsdOriginal;
    platforms = platforms.linux ++ platforms.darwin;
  };
}

View file

@ -0,0 +1,26 @@
{ buildPythonApplication, fetchPypi, lib, pycryptodome }:

# dcnnt: desktop-side companion for connecting an Android phone, from PyPI.
buildPythonApplication rec {
pname = "dcnnt";
version = "0.6.0";
src = fetchPypi {
inherit pname version;
sha256 = "ef8578526163cb3e25fa352ba2f6f4d39309f477a72282416c89eddfb69c3a91";
};
# Runtime Python dependency.
propagatedBuildInputs = [
pycryptodome
];
meta = with lib; {
homepage = "https://github.com/cyanomiko/dcnnt-py";
description = "UI-less tool to connect Android phone with desktop";
longDescription = ''
Yet another tool to connect Android phone with desktop similar to
KDE Connect.
'';
license = licenses.mit;
maintainers = with maintainers; [ arnoutkroeze ];
};
}

View file

@ -0,0 +1,25 @@
{ lib
, buildGoModule
, fetchFromGitHub
}:

# demoit: live-coding demo tool, built from a pinned upstream commit
# (no tagged release, hence the unstable version string).
buildGoModule {
pname = "demoit";
version = "unstable-2020-06-11";
src = fetchFromGitHub {
owner = "dgageot";
repo = "demoit";
rev = "5762b169e7f2fc18913874bf52323ffbb906ce84";
sha256 = "1jcjqr758d29h3y9ajvzhy1xmxfix5mwhylz6jwhy5nmk28bjzx9";
};
# null: use the vendor/ directory committed in the upstream repository.
vendorSha256 = null;
# Only build the root package.
subPackages = [ "." ];
meta = with lib; {
description = "Live coding demos without Context Switching";
homepage = "https://github.com/dgageot/demoit";
license = licenses.asl20;
maintainers = [ maintainers.freezeboy ];
};
}

View file

@ -0,0 +1,41 @@
{ lib, buildGoModule, fetchFromGitHub
, nixosTests, postgresql, postgresqlTestHook }:

# Dendrite Matrix homeserver; the Go test suite runs against a temporary
# PostgreSQL instance provided by postgresqlTestHook.
buildGoModule rec {
pname = "matrix-dendrite";
version = "0.8.5";
src = fetchFromGitHub {
owner = "matrix-org";
repo = "dendrite";
rev = "v${version}";
sha256 = "sha256-MPWvBUI6Mqt3f5UY6lpTBwPpihW+QSNq1M3FnIff+mM=";
};
vendorSha256 = "sha256-OXy2xuwTLPNvBnVB6wj/YRW/XMiekjTubRRPVX9bxdQ=";
checkInputs = [
postgresqlTestHook
postgresql
];
# The tests need a superuser role to create/drop databases.
postgresqlTestUserOptions = "LOGIN SUPERUSER";
preCheck = ''
export PGUSER=$(whoami)
# temporarily disable this failing test
# it passes in upstream CI and requires further investigation
rm roomserver/internal/input/input_test.go
'';
passthru.tests = {
inherit (nixosTests) dendrite;
};
meta = with lib; {
homepage = "https://matrix-org.github.io/dendrite";
description = "A second-generation Matrix homeserver written in Go";
license = licenses.asl20;
maintainers = teams.matrix.members;
platforms = platforms.unix;
};
}

View file

@ -0,0 +1,38 @@
{ lib, buildGoModule, fetchFromGitHub, nixosTests }:

# dex OIDC/OAuth2 identity provider.
buildGoModule rec {
pname = "dex";
version = "2.31.2";
src = fetchFromGitHub {
owner = "dexidp";
repo = pname;
rev = "v${version}";
sha256 = "sha256-x9U+LtcgVYODQoiTkFShdALFfrTIhingrJ43RpHbc78=";
};
vendorSha256 = "sha256-l+/qjYokg5zHAFkKxtkdX49HqVW6kfz7OHqs6SRKDYg=";
# Only the server binary is built.
subPackages = [
"cmd/dex"
];
# Strip debug info and embed the release tag as the version string.
ldflags = [
"-w" "-s" "-X github.com/dexidp/dex/version.Version=${src.rev}"
];
# Ship the web assets (templates/static files) next to the binary.
postInstall = ''
mkdir -p $out/share
cp -r $src/web $out/share/web
'';
passthru.tests = { inherit (nixosTests) dex-oidc; };
meta = with lib; {
description = "OpenID Connect and OAuth2 identity provider with pluggable connectors";
homepage = "https://github.com/dexidp/dex";
license = licenses.asl20;
maintainers = with maintainers; [ benley techknowlogick ];
platforms = platforms.unix;
};
}

View file

@ -0,0 +1,49 @@
{ lib, buildGoModule, fetchFromGitHub, installShellFiles, jemalloc, nodejs }:

# Dgraph distributed graph database, built with the "oss" tag only.
buildGoModule rec {
pname = "dgraph";
version = "21.12.0";
src = fetchFromGitHub {
owner = "dgraph-io";
repo = "dgraph";
rev = "v${version}";
sha256 = "sha256-OYDWr+wJEIP7raIHsXSjvuFr2ENJOllufO5ff6lxoR4=";
};
vendorSha256 = "sha256-YtU3Yeq/lNeq7cOB+KvHbvlH9g40WuJk1ovHxCQMG60=";
# NOTE(review): upstream tests are skipped; no reason recorded here.
doCheck = false;
# Mark the binary as the OSS build in its reported version string.
ldflags = [
"-X github.com/dgraph-io/dgraph/x.dgraphVersion=${version}-oss"
];
tags = [
"oss"
];
nativeBuildInputs = [ installShellFiles ];
# todo those dependencies are required in the makefile, but verify how they are used
# actually
buildInputs = [ jemalloc nodejs ];
subPackages = [ "dgraph" ];
# Generate and install shell completions from the built binary.
postInstall = ''
for shell in bash zsh; do
$out/bin/dgraph completion $shell > dgraph.$shell
installShellCompletion dgraph.$shell
done
'';
meta = with lib; {
homepage = "https://dgraph.io/";
description = "Fast, Distributed Graph DB";
maintainers = with maintainers; [ sigma ];
# Apache 2.0 because we use only build "oss"
license = licenses.asl20;
platforms = platforms.unix;
};
}

View file

@ -0,0 +1,51 @@
{ fetchurl, lib, stdenv, libtool, gettext, zlib, readline, gsasl
, guile, python3, pcre, libffi, groff }:

# GNU Dico modular DICT (RFC 2229) server and client.
stdenv.mkDerivation rec {
pname = "dico";
version = "2.11";
src = fetchurl {
url = "mirror://gnu/${pname}/${pname}-${version}.tar.xz";
sha256 = "sha256-rB+Y4jPQ+srKrBBZ87gThKVZLib9TDCCrtAD9l4lLFo=";
};
# NOTE(review): 'format' hardening is disabled here; presumably the build
# trips -Werror=format-security — confirm before removing.
hardeningDisable = [ "format" ];
nativeBuildInputs = [ groff ];
buildInputs =
[ libtool gettext zlib readline gsasl guile python3 pcre libffi ];
strictDeps = true;
doCheck = true;
meta = with lib; {
description = "Flexible dictionary server and client implementing RFC 2229";
homepage = "https://www.gnu.org/software/dico/";
license = licenses.gpl3Plus;
maintainers = with maintainers; [ lovek323 ];
platforms = platforms.unix;
longDescription = ''
GNU Dico is a flexible modular implementation of DICT server
(RFC 2229). In contrast to another existing servers, it does
not depend on particular database format, instead it handles
database accesses using loadable modules.
The package includes several loadable modules for interfacing
with various database formats, among them a module for dict.org
databases and a module for transparently accessing Wikipedia or
Wiktionary sites as a dictionary database.
New modules can easily be written in C, Guile or Python. The
module API is mature and well documented.
A web interface serving several databases is available.
The package also includes a console client program for querying
remote dictionary servers.
'';
};
}

View file

@ -0,0 +1,11 @@
--- Makefile.in~ 2011-03-06 18:52:54.000000000 +0100
+++ Makefile.in 2014-01-29 19:04:51.384844897 +0100
@@ -123,7 +123,7 @@
%: %.o
$(LIBTOOL) --tag=CC --mode=link $(CC) -o $@ -static \
- $^ $(OBJS) $(LDFLAGS) -lz ${LIBS}
+ $(^:.o=.lo) $(OBJS) $(LDFLAGS) -lz ${LIBS}
include $(srcdir)/deps

View file

@ -0,0 +1,37 @@
{ lib, stdenv, fetchurl, which, bison, flex, libmaa, zlib, libtool }:

# dictd DICT protocol server and client.
stdenv.mkDerivation rec {
  pname = "dictd";
  version = "1.13.1";

  src = fetchurl {
    url = "mirror://sourceforge/dict/dictd-${version}.tar.gz";
    sha256 = "sha256-5PGmfRaJTYSUVp19yUQsFcw4wBHyuWMcfxzGInZlKhs=";
  };

  buildInputs = [ libmaa zlib ];
  nativeBuildInputs = [ bison flex libtool which ];

  # Let the generic patchPhase apply the patch instead of invoking the patch
  # tool by hand (previously: patchPhase = "patch -p0 < ...").  The diff was
  # made without a/ b/ prefixes, hence -p0 instead of the default -p1.
  patches = [ ./buildfix.diff ];
  patchFlags = [ "-p0" ];

  # In earlier versions, parallel building was not supported but it's OK with 1.13
  enableParallelBuilding = true;

  configureFlags = [
    "--datadir=/run/current-system/sw/share/dictd"
    "--sysconfdir=/etc"
  ];

  postInstall = ''
    install -Dm444 -t $out/share/doc/${pname} NEWS README
  '';

  meta = with lib; {
    description = "Dict protocol server and client";
    homepage = "http://www.dict.org";
    license = licenses.gpl2;
    maintainers = with maintainers; [ ];
    platforms = platforms.linux;
  };
}

View file

@ -0,0 +1,83 @@
{ stdenv, lib, dict }:

# Builder that packages a set of dict databases into a single derivation
# containing the data files plus a generated dictd.conf.
({ dictlist, allowList ? [ "127.0.0.1" ], denyList ? [ ] }:
/*
dictlist is a list of form
[ { filename = /path/to/files/basename;
name = "name"; } ]
basename.dict.dz and basename.index should be
dict files. Or look below for other options.
allowList is a list of IP/domain *-wildcarded strings
denyList is the same..
*/
let
# Quoted basenames/paths of every database, for iteration in the shell below.
link_arguments = map
(x: '' "${x.filename}" '')
dictlist;
# "name path" lines used to recover each database's name from its path.
databases = lib.concatStrings (map
(x:
"${x.name} ${x.filename}\n")
dictlist);
# allow/deny directives for the dictd access section.
allow = lib.concatStrings (map (x: "allow ${x}\n") allowList);
deny = lib.concatStrings (map (x: "deny ${x}\n") denyList);
accessSection = "
access {
${allow}
${deny}
}
";
# Assembles $out/share/dictd: links or dictzips each database, generates the
# .word/.suffix search indexes, and appends a database stanza to dictd.conf.
installPhase = ''
mkdir -p $out/share/dictd
cd $out/share/dictd
echo "${databases}" >databases.names
echo "${accessSection}" > dictd.conf
for j in ${toString link_arguments}; do
name="$(egrep ' '"$j"\$ databases.names)"
name=''${name% $j}
if test -d "$j"; then
if test -d "$j"/share/dictd ; then
echo "Got store path $j"
j="$j"/share/dictd
fi
echo "Directory reference: $j"
i=$(ls "$j""/"*.index)
i="''${i%.index}";
else
i="$j";
fi
echo "Basename is $i"
locale=$(cat "$(dirname "$i")"/locale)
base="$(basename "$i")"
echo "Locale is $locale"
export LC_ALL=$locale
export LANG=$locale
if test -e "$i".dict.dz; then
ln -s "$i".dict.dz
else
cp "$i".dict .
dictzip "$base".dict
fi
ln -s "$i".index .
dictfmt_index2word --locale $locale < "$base".index > "$base".word || true
dictfmt_index2suffix --locale $locale < "$base".index > "$base".suffix || true
echo "database $name {" >> dictd.conf
echo " data $out/share/dictd/$base.dict.dz" >> dictd.conf
echo " index $out/share/dictd/$base.index" >> dictd.conf
echo " index_word $out/share/dictd/$base.word" >> dictd.conf
echo " index_suffix $out/share/dictd/$base.suffix" >> dictd.conf
echo "}" >> dictd.conf
done
'';
in
stdenv.mkDerivation {
name = "dictd-dbs";
# dict provides dictzip/dictfmt_index2word/dictfmt_index2suffix used above.
buildInputs = [ dict ];
dontUnpack = true;
inherit installPhase;
})

View file

@ -0,0 +1,95 @@
# Attribute set of dictd databases: FreeDict translation dictionaries, the
# Mueller English-Russian dictionary family, WordNet and Wiktionary.
{ lib, stdenv, fetchurl, callPackage }:
let
# Probably a bug in some FreeDict release files, but easier to trivially
# work around than report. Not that it can cause any other problems..
makeDictdDBFreedict = src: name: locale:
makeDictdDB src name "{.,bin}" locale;
# Package one dictd database: unpack src, copy the .dict*/.index files
# found under _subdir, and record the locale used for index generation.
makeDictdDB = src: _name: _subdir: _locale:
stdenv.mkDerivation {
name = "dictd-db-${_name}";
inherit src;
locale = _locale;
dbName = _name;
dontBuild = true;
unpackPhase = ''
tar xf ${src}
'';
installPhase = ''
mkdir -p $out/share/dictd
cp $(ls ./${_subdir}/*.{dict*,index} || true) $out/share/dictd
echo "${_locale}" >$out/share/dictd/locale
'';
meta = {
description = "dictd-db dictionary for dictd";
platforms = lib.platforms.linux;
};
};
in rec {
deu2eng = makeDictdDBFreedict (fetchurl {
url = "mirror://sourceforge/freedict/deu-eng.tar.gz";
sha256 = "0dqrhv04g4f5s84nbgisgcfwk5x0rpincif0yfhfh4sc1bsvzsrb";
}) "deu-eng" "de_DE";
eng2deu = makeDictdDBFreedict (fetchurl {
url = "mirror://sourceforge/freedict/eng-deu.tar.gz";
sha256 = "01x12p72sa3071iff3jhzga8588440f07zr56r3x98bspvdlz73r";
}) "eng-deu" "en_EN";
nld2eng = makeDictdDBFreedict (fetchurl {
url = "mirror://sourceforge/freedict/nld-eng.tar.gz";
sha256 = "1vhw81pphb64fzsjvpzsnnyr34ka2fxizfwilnxyjcmpn9360h07";
}) "nld-eng" "nl_NL";
eng2nld = makeDictdDBFreedict (fetchurl {
url = "mirror://sourceforge/freedict/eng-nld.tar.gz";
sha256 = "0rcg28ldykv0w2mpxc6g4rqmfs33q7pbvf68ssy1q9gpf6mz7vcl";
}) "eng-nld" "en_UK";
eng2rus = makeDictdDBFreedict (fetchurl {
url = "mirror://sourceforge/freedict/eng-rus.tar.gz";
sha256 = "15409ivhww1wsfjr05083pv6mg10bak8v5pg1wkiqybk7ck61rry";
}) "eng-rus" "en_UK";
fra2eng = makeDictdDBFreedict (fetchurl {
url = "mirror://sourceforge/freedict/fra-eng.tar.gz";
sha256 = "0sdd88s2zs5whiwdf3hd0s4pzzv75sdsccsrm1wxc87l3hjm85z3";
}) "fra-eng" "fr_FR";
eng2fra = makeDictdDBFreedict (fetchurl {
url = "mirror://sourceforge/freedict/eng-fra.tar.gz";
sha256 = "0fi6rrnbqnhc6lq8d0nmn30zdqkibrah0mxfg27hsn9z7alwbj3m";
}) "eng-fra" "en_UK";
# The Mueller tarball ships several databases in one tree; the attrsets
# below point into subdirectories of this single store path.
mueller_eng2rus_pkg = makeDictdDB (fetchurl {
url = "mirror://sourceforge/mueller-dict/mueller-dict-3.1.tar.gz";
sha256 = "04r5xxznvmcb8hkxqbjgfh2gxvbdd87jnhqn5gmgvxxw53zpwfmq";
}) "mueller-eng-rus" "mueller-dict-*/dict" "en_UK";
mueller_enru_abbr = {
outPath = "${mueller_eng2rus_pkg}/share/dictd/mueller-abbrev";
name = "mueller-abbr";
dbName = "mueller-abbr";
locale = "en_UK";
};
mueller_enru_base = {
outPath = "${mueller_eng2rus_pkg}/share/dictd/mueller-base";
name = "mueller-base";
dbName = "mueller-base";
locale = "en_UK";
};
mueller_enru_dict = {
outPath = "${mueller_eng2rus_pkg}/share/dictd/mueller-dict";
name = "mueller-dict";
dbName = "mueller-dict";
locale = "en_UK";
};
mueller_enru_geo = {
outPath = "${mueller_eng2rus_pkg}/share/dictd/mueller-geo";
name = "mueller-geo";
dbName = "mueller-geo";
locale = "en_UK";
};
mueller_enru_names = {
outPath = "${mueller_eng2rus_pkg}/share/dictd/mueller-names";
name = "mueller-names";
dbName = "mueller-names";
locale = "en_UK";
};
wordnet = callPackage ./dictd-wordnet.nix {};
wiktionary = callPackage ./wiktionary {};
}

View file

@ -0,0 +1,36 @@
# Convert the WordNet database into dictd format using the bundled
# wordnet_structures.py converter (Python 2).
{lib, stdenv, python2, wordnet, writeScript}:
stdenv.mkDerivation rec {
version = "542";
pname = "dict-db-wordnet";
buildInputs = [python2 wordnet];
# Converter script invoked by the builder; takes alternating
# index/data file paths on the command line.
convert = ./wordnet_structures.py;
builder = writeScript "builder.sh" ''
. ${stdenv}/setup
mkdir -p $out/share/dictd/
cd $out/share/dictd
for i in ${wordnet}/dict/data.*; do
DATA="$DATA `echo $i | sed -e s,data,index,` $i";
done
python ${convert} $DATA
echo en_US.UTF-8 > locale
'';
meta = {
description = "dictd-compatible version of WordNet";
longDescription =
'' WordNet® is a large lexical database of English. This package makes
the wordnet data available to dictd and by extension for lookup with
the dict command. '';
homepage = "https://wordnet.princeton.edu/";
maintainers = [ ];
platforms = lib.platforms.all;
};
}

View file

@ -0,0 +1,22 @@
# libmaa: support library used by the dict suite (dictd/dictfmt).
{ lib, stdenv, fetchurl, libtool }:
stdenv.mkDerivation rec {
version = "1.3.2";
pname = "libmaa";
src = fetchurl {
url = "mirror://sourceforge/dict/libmaa-${version}.tar.gz";
sha256 = "1idi4c30pi79g5qfl7rr9s17krbjbg93bi8f2qrbsdlh78ga19ar";
};
buildInputs = [ libtool ];
# configureFlags = [ "--datadir=/run/current-system/share/dictd" ];
NIX_CFLAGS_COMPILE = "-Wno-error=format-truncation";
meta = with lib; {
# Was "Dict protocol server and client" — copy-pasted from the dictd
# package; this derivation builds only the support library.
description = "Low-level support library used by the dictd suite";
maintainers = [ ];
platforms = platforms.linux;
};
}

View file

@ -0,0 +1,35 @@
# Build a dictd database from the English Wiktionary XML dump using the
# bundled wiktionary2dict.py converter (Python 2).
{ lib, stdenv, fetchurl, python2, dict, glibcLocales }:
stdenv.mkDerivation rec {
pname = "dict-db-wiktionary";
version = "20220420";
src = fetchurl {
url = "https://dumps.wikimedia.org/enwiktionary/${version}/enwiktionary-${version}-pages-articles.xml.bz2";
sha256 = "qsha26LL2513SDtriE/0zdPX1zlnpzk1KKk+R9dSdew=";
};
# script in nixpkgs does not support python2
nativeBuildInputs = [ python2 dict glibcLocales ];
dontUnpack = true;
installPhase = ''
mkdir -p $out/share/dictd/
cd $out/share/dictd
${python2.interpreter} -O ${./wiktionary2dict.py} "${src}"
dictzip wiktionary-en.dict
echo en_US.UTF-8 > locale
'';
# update.sh queries dumps.wikimedia.org for the newest dump version.
passthru.updateScript = ./update.sh;
meta = with lib; {
description = "DICT version of English Wiktionary";
homepage = "https://en.wiktionary.org/";
maintainers = with maintainers; [ qyliss ];
platforms = platforms.all;
license = with licenses; [ cc-by-sa-30 fdl11Plus ];
};
}

View file

@ -0,0 +1,42 @@
import subprocess
from html.parser import HTMLParser
from os.path import abspath, dirname
from urllib.request import urlopen
class WiktionaryLatestVersionParser(HTMLParser):
    """Scan the dump server's directory listing for the newest dump version.

    Directory names are YYYYMMDD, so lexicographic `max` matches
    chronological order.  Starts from `current_version` and only moves
    forward.
    """

    def __init__(self, current_version, *args, **kwargs):
        self.latest_version = current_version
        super().__init__(*args, **kwargs)

    def handle_starttag(self, tag, attrs):
        if tag != 'a':
            return

        # Fixed: an <a> without an href attribute used to raise KeyError;
        # skip such tags instead of crashing.
        href = dict(attrs).get('href')
        if href is None:
            return

        # Drop the trailing '/' of the directory link.
        href = href[0:-1]
        if href == 'latest':
            return

        self.latest_version = max(self.latest_version, href)
def nix_prefetch_url(url, algo='sha256'):
    """Prefetch `url` into the nix store and return its hash string."""
    print(f'nix-prefetch-url {url}')
    command = ['nix-prefetch-url', '--type', algo, url]
    output = subprocess.check_output(command)
    return output.decode('utf-8').rstrip()
# Read the version currently recorded in nixpkgs by evaluating
# dictdDBs.wiktionary.version at the repository root (four levels up).
current_version = subprocess.check_output([
    'nix', 'eval', '--raw',
    '-f', dirname(abspath(__file__)) + '/../../../..',
    'dictdDBs.wiktionary.version',
]).decode('utf-8')

parser = WiktionaryLatestVersionParser(current_version)

# Feed the dump server's directory listing through the parser.
with urlopen('https://dumps.wikimedia.org/enwiktionary/') as resp:
    parser.feed(resp.read().decode('utf-8'))

# Printed for consumption by update.sh.
print(parser.latest_version)

View file

@ -0,0 +1,7 @@
#! /usr/bin/env nix-shell
#! nix-shell -i bash -p common-updater-scripts python3
# Updater for dictdDBs.wiktionary: ask latest_version.py for the newest
# dump on dumps.wikimedia.org and record it via update-source-version.
set -ueo pipefail

version="$(python "$(dirname "${BASH_SOURCE[0]}")"/latest_version.py)"
update-source-version dictdDBs.wiktionary "$version"

View file

@ -0,0 +1,778 @@
# Adapted to produce DICT-compatible files by Petr Rockai in 2012
# Based on code from wiktiondict by Greg Hewgill
import re
import sys
import codecs
import os
import textwrap
import time
import xml.sax
class Text:
    """Plain-text node; process() yields the stored text unchanged."""

    def __init__(self, s):
        self.s = s

    def process(self):
        # Fixed: previously `return s`, a bare name that raised NameError
        # whenever process() was called; the stored string is self.s.
        return self.s
class TemplateCall:
    # Placeholder node for a template invocation; currently inert.
    def __init__(self):
        pass

    def process(self):
        pass


class Template:
    # Ordered sequence of nodes forming a template body; process()
    # concatenates the expansion of every part.
    def __init__(self):
        self.parts = []

    def append(self, part):
        self.parts.append(part)

    def process(self):
        return ''.join(x.process() for x in self.parts)


class Whitespace:
    # Token carrying a run of whitespace, kept verbatim.
    def __init__(self, s):
        self.s = s


# Marker tokens produced by Tokenise for '{{', '{{{', '}}' and '}}}'.
class OpenDouble: pass
class OpenTriple: pass
class CloseDouble: pass
class CloseTriple: pass


class Equals:
    # Token for '=' separating a named template argument from its value.
    def __str__(self):
        return "="


class Delimiter:
    # Token for a ':' or '|' delimiter inside a template call.
    def __init__(self, c):
        self.c = c

    def __str__(self):
        return self.c
def Tokenise(s):
    """Lexer for MediaWiki template syntax (Python 2).

    Yields a mix of unicode text runs and the marker tokens defined above
    (OpenDouble/OpenTriple, CloseDouble/CloseTriple, Delimiter, Equals).
    `stack` records 2 or 3 for each open bracket so that a run of closing
    braces can be matched to '{{' vs '{{{'.
    """
    s = unicode(s)
    stack = []
    last = 0  # start index of the current plain-text run
    i = 0
    while i < len(s):
        if s[i] == '{' and i+1 < len(s) and s[i+1] == '{':
            if i > last:
                yield s[last:i]
            if i+2 < len(s) and s[i+2] == '{':
                yield OpenTriple()
                stack.append(3)
                i += 3
            else:
                yield OpenDouble()
                stack.append(2)
                i += 2
            last = i
        elif s[i] == '}' and i+1 < len(s) and s[i+1] == '}':
            if i > last:
                yield s[last:i]
            if len(stack) == 0:
                # Unbalanced close brackets pass through as literal text.
                yield "}}"
                i += 2
            elif stack[-1] == 2:
                yield CloseDouble()
                i += 2
                stack.pop()
            elif i+2 < len(s) and s[i+2] == '}':
                yield CloseTriple()
                i += 3
                stack.pop()
            else:
                # '}}' seen while a '{{{' is open and no third '}' follows.
                raise SyntaxError()
            last = i
        elif s[i] == ':' or s[i] == '|':
            if i > last:
                yield s[last:i]
            yield Delimiter(s[i])
            i += 1
            last = i
        elif s[i] == '=':
            if i > last:
                yield s[last:i]
            yield Equals()
            i += 1
            last = i
        #elif s[i] == ' ' or s[i] == '\t' or s[i] == '\n':
        #    if i > last:
        #        yield s[last:i]
        #    last = i
        #    m = re.match(r"\s+", s[i:])
        #    assert m
        #    yield Whitespace(m.group(0))
        #    i += len(m.group(0))
        #    last = i
        else:
            i += 1
    if i > last:
        yield s[last:i]
def processSub(templates, tokens, args):
    """Expand a '{{{name|default}}}' template-parameter reference (Python 2).

    Consumes tokens up to the matching CloseTriple.  Resolution order:
    value from `args`, then the inline default (which may itself contain
    nested templates/parameters), then "en" for the special name "lang",
    otherwise the literal '{{{name}}}' text.
    """
    t = tokens.next()
    if not isinstance(t, unicode):
        raise SyntaxError
    name = t
    t = tokens.next()
    default = None
    if isinstance(t, Delimiter) and t.c == '|':
        default = ""
        while True:
            t = tokens.next()
            if isinstance(t, unicode):
                default += t
            elif isinstance(t, OpenDouble):
                # Nested template call inside the default value.
                default += processTemplateCall(templates, tokens, args)
            elif isinstance(t, OpenTriple):
                # Nested parameter reference inside the default value.
                default += processSub(templates, tokens, args)
            elif isinstance(t, CloseTriple):
                break
            else:
                print "Unexpected:", t
                raise SyntaxError()
    if name in args:
        return args[name]
    if default is not None:
        return default
    if name == "lang":
        return "en"
    return "{{{%s}}}" % name
def processTemplateCall(templates, tokens, args):
template = tokens.next().strip().lower()
args = {}
a = 1
t = tokens.next()
while True:
if isinstance(t, Delimiter):
name = unicode(a)
arg = ""
while True:
t = tokens.next()
if isinstance(t, unicode):
arg += t
elif isinstance(t, OpenDouble):
arg += processTemplateCall(templates, tokens, args)
elif isinstance(t, OpenTriple):
arg += processSub(templates, tokens, args)
elif isinstance(t, Delimiter) and t.c != '|':
arg += str(t)
else:
break
if isinstance(t, Equals):
name = arg.strip()
arg = ""
while True:
t = tokens.next()
if isinstance(t, (unicode, Equals)):
arg += unicode(t)
elif isinstance(t, OpenDouble):
arg += processTemplateCall(templates, tokens, args)
elif isinstance(t, OpenTriple):
arg += processSub(templates, tokens, args)
elif isinstance(t, Delimiter) and t.c != '|':
arg += str(t)
else:
break
arg = arg.strip()
else:
a += 1
args[name] = arg
elif isinstance(t, CloseDouble):
break
else:
print "Unexpected:", t
raise SyntaxError
#print template, args
if template[0] == '#':
if template == "#if":
if args['1'].strip():
return args['2']
elif '3' in args:
return args['3']
else:
return ""
elif template == "#ifeq":
if args['1'].strip() == args['2'].strip():
return args['3']
elif '4' in args:
return args['4']
else:
return ""
elif template == "#ifexist":
return ""
elif template == "#switch":
sw = args['1'].strip()
if sw in args:
return args[sw]
else:
return ""
else:
print "Unknown ParserFunction:", template
sys.exit(1)
if template not in templates:
return "{{%s}}" % template
return process(templates, templates[template], args)
def process(templates, s, args = {}):
    """Expand templates and parameter references in wikitext `s` (Python 2).

    Strips HTML comments and <noinclude> blocks, unwraps <includeonly>,
    then walks the token stream, delegating '{{' to processTemplateCall
    and '{{{' to processSub.  `args` supplies parameter values when `s`
    is a template body being expanded.
    """
    s = re.compile(r"<!--.*?-->", re.DOTALL).sub("", s)
    s = re.compile(r"<noinclude>.*?</noinclude>", re.DOTALL).sub("", s)
    assert "<onlyinclude>" not in s
    #s = re.sub(r"(.*?)<onlyinclude>(.*?)</onlyinclude>(.*)", r"\1", s)
    s = re.compile(r"<includeonly>(.*?)</includeonly>", re.DOTALL).sub(r"\1", s)
    r = ""
    #print list(Tokenise(s))
    tokens = Tokenise(s)
    try:
        while True:
            t = tokens.next()
            if isinstance(t, OpenDouble):
                r += processTemplateCall(templates, tokens, args)
            elif isinstance(t, OpenTriple):
                r += processSub(templates, tokens, args)
            else:
                r += unicode(t)
    except StopIteration:
        # Token stream exhausted: expansion complete.
        pass
    return r
def test():
templates = {
'lb': "{{",
'name-example': "I am a template example, my first name is '''{{{firstName}}}''' and my last name is '''{{{lastName}}}'''. You can reference my page at [[{{{lastName}}}, {{{firstName}}}]].",
't': "start-{{{1|pqr}}}-end",
't0': "start-{{{1}}}-end",
't1': "start{{{1}}}end<noinclude>moo</noinclude>",
't2a1': "{{t2demo|a|{{{1}}}}}",
't2a2': "{{t2demo|a|2={{{1}}}}}",
't2demo': "start-{{{1}}}-middle-{{{2}}}-end",
't5': "{{t2demo|{{{a}}}=b}}",
't6': "t2demo|a",
}
def t(text, expected):
print "text:", text
s = process(templates, text)
if s != expected:
print "got:", s
print "expected:", expected
sys.exit(1)
t("{{Name-example}}", "I am a template example, my first name is '''{{{firstName}}}''' and my last name is '''{{{lastName}}}'''. You can reference my page at [[{{{lastName}}}, {{{firstName}}}]].")
t("{{Name-example | firstName=John | lastName=Smith }}", "I am a template example, my first name is '''John''' and my last name is '''Smith'''. You can reference my page at [[Smith, John]].")
t("{{t0|a}}", "start-a-end")
t("{{t0| }}", "start- -end")
t("{{t0|}}", "start--end")
t("{{t0}}", "start-{{{1}}}-end")
t("{{t0| }}", "start- -end")
t("{{t0|\n}}", "start-\n-end")
t("{{t0|1= }}", "start--end")
t("{{t0|1=\n}}", "start--end")
t("{{T}}", "start-pqr-end")
t("{{T|}}", "start--end")
t("{{T|abc}}", "start-abc-end")
t("{{T|abc|def}}", "start-abc-end")
t("{{T|1=abc|1=def}}", "start-def-end")
t("{{T|abc|1=def}}", "start-def-end")
t("{{T|1=abc|def}}", "start-def-end")
t("{{T|{{T}}}}", "start-start-pqr-end-end")
t("{{T|{{T|{{T}}}}}}", "start-start-start-pqr-end-end-end")
t("{{T|{{T|{{T|{{T}}}}}}}}", "start-start-start-start-pqr-end-end-end-end")
t("{{T|a{{t|b}}}}", "start-astart-b-end-end")
t("{{T|{{T|a=b}}}}", "start-start-pqr-end-end")
t("{{T|a=b}}", "start-pqr-end")
t("{{T|1=a=b}}", "start-a=b-end")
#t("{{t1|{{lb}}tc}}}}", "start{{tcend}}")
#t("{{t2a1|1=x=y}}", "start-a-middle-{{{2}}}-end")
#t("{{t2a2|1=x=y}}", "start-a-middle-x=y-end")
#t("{{t5|a=2=d}}", "start-{{{1}}}-middle-d=b-end")
#t("{{ {{t6}} }}", "{{ t2demo|a }}")
t("{{t|[[a|b]]}}", "start-b-end")
t("{{t|[[a|b]] }}", "start-b -end")
Parts = {
# Standard POS headers
'noun': "n.",
'Noun': "n.",
'Noun 1': "n.",
'Noun 2': "n.",
'Verb': "v.",
'Adjective': "adj.",
'Adverb': "adv.",
'Pronoun': "pron.",
'Conjunction': "conj.",
'Interjection': "interj.",
'Preposition': "prep.",
'Proper noun': "n.p.",
'Proper Noun': "n.p.",
'Article': "art.",
# Standard non-POS level 3 headers
'{{acronym}}': "acr.",
'Acronym': "acr.",
'{{abbreviation}}': "abbr.",
'[[Abbreviation]]': "abbr.",
'Abbreviation': "abbr.",
'[[initialism]]': "init.",
'{{initialism}}': "init.",
'Initialism': "init.",
'Contraction': "cont.",
'Prefix': "prefix",
'Suffix': "suffix",
'Symbol': "sym.",
'Letter': "letter",
'Idiom': "idiom",
'Idioms': "idiom",
'Phrase': "phrase",
# Debated POS level 3 headers
'Number': "num.",
'Numeral': "num.",
'Cardinal number': "num.",
'Ordinal number': "num.",
'Cardinal numeral': "num.",
'Ordinal numeral': "num.",
# Other headers in use
'Personal pronoun': "pers.pron.",
'Adjective/Adverb': "adj./adv.",
'Proper adjective': "prop.adj.",
'Determiner': "det.",
'Demonstrative determiner': "dem.det.",
'Clitic': "clitic",
'Infix': "infix",
'Counter': "counter",
'Kanji': None,
'Kanji reading': None,
'Hiragana letter': None,
'Katakana letter': None,
'Pinyin': None,
'Han character': None,
'Hanzi': None,
'Hanja': None,
'Proverb': "prov.",
'Expression': None,
'Adjectival noun': None,
'Quasi-adjective': None,
'Particle': "part.",
'Infinitive particle': "part.",
'Possessive adjective': "poss.adj.",
'Verbal prefix': "v.p.",
'Postposition': "post.",
'Prepositional article': "prep.art.",
'Phrasal verb': "phr.v.",
'Participle': "participle",
'Interrogative auxiliary verb': "int.aux.v.",
'Pronominal adverb': "pron.adv.",
'Adnominal': "adn.",
'Abstract pronoun': "abs.pron.",
'Conjunction particle': None,
'Root': "root",
# Non-standard, deprecated headers
'Noun form': "n.",
'Verb form': "v.",
'Adjective form': "adj.form.",
'Nominal phrase': "nom.phr.",
'Noun phrase': "n. phrase",
'Verb phrase': "v. phrase",
'Transitive verb': "v.t.",
'Intransitive verb': "v.i.",
'Reflexive verb': "v.r.",
'Cmavo': None,
'Romaji': "rom.",
'Hiragana': None,
'Furigana': None,
'Compounds': None,
# Other headers seen
'Alternative forms': None,
'Alternative spellings': None,
'Anagrams': None,
'Antonym': None,
'Antonyms': None,
'Conjugation': None,
'Declension': None,
'Declension and pronunciations': None,
'Definite Article': "def.art.",
'Definite article': "def.art.",
'Demonstrative pronoun': "dem.pron.",
'Derivation': None,
'Derived expression': None,
'Derived expressions': None,
'Derived forms': None,
'Derived phrases': None,
'Derived terms': None,
'Derived, Related terms': None,
'Descendants': None,
#'Etymology': None,
#'Etymology 1': None,
#'Etymology 2': None,
#'Etymology 3': None,
#'Etymology 4': None,
#'Etymology 5': None,
'Examples': None,
'External links': None,
'[[Gismu]]': None,
'Gismu': None,
'Homonyms': None,
'Homophones': None,
'Hyphenation': None,
'Indefinite article': "art.",
'Indefinite pronoun': "ind.pron.",
'Indefinite Pronoun': "ind.pron.",
'Indetermined pronoun': "ind.pron.",
'Interrogative conjunction': "int.conj.",
'Interrogative determiner': "int.det.",
'Interrogative particle': "int.part.",
'Interrogative pronoun': "int.pron.",
'Legal expression': "legal",
'Mass noun': "n.",
'Miscellaneous': None,
'Mutations': None,
'Noun and verb': "n/v.",
'Other language': None,
'Pinyin syllable': None,
'Possessive determiner': "poss.det.",
'Possessive pronoun': "poss.pron.",
'Prepositional phrase': "prep.phr.",
'Prepositional Pronoun': "prep.pron.",
'Pronunciation': None,
'Pronunciation 1': None,
'Pronunciation 2': None,
'Quotations': None,
'References': None,
'Reflexive pronoun': "refl.pron.",
'Related expressions': None,
'Related terms': None,
'Related words': None,
'Relative pronoun': "rel.pron.",
'Saying': "saying",
'See also': None,
'Shorthand': None,
'[http://en.wikipedia.org/wiki/Shorthand Shorthand]': None,
'Sister projects': None,
'Spelling note': None,
'Synonyms': None,
'Translation': None,
'Translations': None,
'Translations to be checked': None,
'Transliteration': None,
'Trivia': None,
'Usage': None,
'Usage in English': None,
'Usage notes': None,
'Verbal noun': "v.n.",
}
PartsUsed = {}
for p in Parts.keys():
PartsUsed[p] = 0
def encode(s):
r = e(s)
assert r[1] == len(s)
return r[0]
def dowikilink(m):
    """re.sub callback: reduce a [[target|label]] wikilink match to its
    display text; links containing ':' (other namespaces) are dropped."""
    pieces = m.group(1).split("|")
    text = pieces[1] if len(pieces) > 1 else pieces[0]
    if ':' in text:
        return ""
    return text
seentemplates = {}
def dotemplate(m):
aa = m.group(1).split("|")
args = {}
n = 0
for a in aa:
am = re.match(r"(.*?)(=(.*))?", a)
if am:
args[am.group(1)] = am.group(3)
else:
n += 1
args[n] = am.group(1)
#if aa[0] in seentemplates:
# seentemplates[aa[0]] += 1
#else:
# seentemplates[aa[0]] = 1
# print len(seentemplates), aa[0]
#print aa[0]
#if aa[0] not in Templates:
# return "(unknown template %s)" % aa[0]
#body = Templates[aa[0]]
#body = re.sub(r"<noinclude>.*?</noinclude>", "", body)
#assert "<onlyinclude>" not in body
##body = re.sub(r"(.*?)<onlyinclude>(.*?)</onlyinclude>(.*)", r"\1", body)
#body = re.sub(r"<includeonly>(.*?)</includeonly>", r"\1", body)
#def dotemplatearg(m):
# ta = m.group(1).split("|")
# if ta[0] in args:
# return args[ta[0]]
# elif len(ta) > 1:
# return ta[1]
# else:
# return "{{{%s}}}" % ta[0]
#body = re.sub(r"{{{(.*?)}}}", dotemplatearg, body)
#return dewiki(body)
def doparserfunction(m):
    """re.sub callback for '{{#name:args}}' parser functions.

    Only #ifeq is implemented; everything else (and an #ifeq with no
    matching branch) expands to the empty string.
    """
    argv = m.group(2).split("|")
    if m.group(1) == "ifeq":
        if argv[0] == argv[1]:
            return argv[2]
        if len(argv) >= 4:
            return argv[3]
    return ""
def dewiki(body, indent = 0):
    """Strip wiki markup from `body` and re-wrap it as indented plain text.

    Wikilinks are reduced to their display text, bold/italic quoting is
    removed, and '#'/'*' list lines are wrapped with list-style indents.
    The numbered-list counter resets on any non-'#' line.
    """
    # process in this order:
    # {{{ }}}
    # <> <>
    # [[ ]]
    # {{ }}
    # ''' '''
    # '' ''
    #body = wikimediatemplate.process(Templates, body)
    body = re.sub(r"\[\[(.*?)\]\]", dowikilink, body)
    #body = re.sub(r"{{(.*?)}}", dotemplate, body)
    #body = re.sub(r"{{#(.*?):(.*?)}}", doparserfunction, body)
    body = re.sub(r"'''(.*?)'''", r"\1", body)
    body = re.sub(r"''(.*?)''", r"\1", body)
    lines = body.split("\n")
    n = 0  # running counter for '#' (numbered) list items
    i = 0
    while i < len(lines):
        if len(lines[i]) > 0 and lines[i][0] == "#":
            if len(lines[i]) > 1 and lines[i][1] == '*':
                # '#*' - bullet nested under a numbered item
                wlines = textwrap.wrap(lines[i][2:].strip(),
                    initial_indent = " * ",
                    subsequent_indent = " ")
            elif len(lines[i]) > 1 and lines[i][1] == ':':
                # '#:' - indented continuation of a numbered item
                wlines = textwrap.wrap(lines[i][2:].strip(),
                    initial_indent = " ",
                    subsequent_indent = " ")
            else:
                n += 1
                wlines = textwrap.wrap(str(n) + ". " + lines[i][1:].strip(),
                    subsequent_indent = " ")
        elif len(lines[i]) > 0 and lines[i][0] == "*":
            n = 0
            wlines = textwrap.wrap(lines[i][1:].strip(),
                initial_indent = "* ",
                subsequent_indent = " ")
        else:
            n = 0
            wlines = textwrap.wrap(lines[i].strip())
        if len(wlines) == 0:
            wlines = ['']
        # Splice the wrapped lines back in place of the original line.
        lines[i:i+1] = wlines
        i += len(wlines)
    return ''.join(" "*(indent-1)+x+"\n" for x in lines)
class WikiSection:
    """One section of a wiki page: heading, body text and nested
    subsections."""

    def __init__(self, heading, body):
        self.heading = heading
        self.body = body
        #self.lines = re.split("\n+", body.strip())
        #if len(self.lines) == 1 and len(self.lines[0]) == 0:
        #    self.lines = []
        self.children = []

    def __str__(self):
        rendered_children = ','.join(str(child) for child in self.children)
        return "<%s:%i:%s>" % (self.heading, len(self.body or ""), rendered_children)

    def add(self, section):
        self.children.append(section)
def parse(word, text):
    """Parse a wiki page into a WikiSection tree.

    Heading depth equals the number of leading '=' characters; skipped
    levels are padded with anonymous (heading=None) filler sections so the
    stack depth always matches the heading depth.
    """
    headings = list(re.finditer("^(=+)\s*(.*?)\s*=+\n", text, re.MULTILINE))
    #print [x.group(1) for x in headings]
    doc = WikiSection(word, "")
    stack = [doc]
    for i, m in enumerate(headings):
        depth = len(m.group(1))
        if depth < len(stack):
            stack = stack[:depth]
        else:
            # Pad with empty sections for skipped heading levels.
            while depth > len(stack):
                s = WikiSection(None, "")
                stack[-1].add(s)
                stack.append(s)
        if i+1 < len(headings):
            # Section body runs up to the start of the next heading.
            s = WikiSection(m.group(2), text[m.end(0):headings[i+1].start(0)].strip())
        else:
            s = WikiSection(m.group(2), text[m.end(0):].strip())
        assert len(stack) == depth
        stack[-1].add(s)
        stack.append(s)
    #while doc.heading is None and len(doc.lines) == 0 and len(doc.children) == 1:
    #    doc = doc.children[0]
    return doc
def formatFull(word, doc):
def f(depth, section):
if section.heading:
r = " "*(depth-1) + section.heading + "\n\n"
else:
r = ""
if section.body:
r += dewiki(section.body, depth+1)+"\n"
#r += "".join(" "*depth + x + "\n" for x in dewiki(section.body))
#if len(section.lines) > 0:
# r += "\n"
for c in section.children:
r += f(depth+1, c)
return r
s = f(0, doc)
s += "Ref: http://en.wiktionary.org/wiki/%s\n" % word
return s
def formatNormal(word, doc):
def f(depth, posdepth, section):
r = ""
if depth == posdepth:
if not section.heading or section.heading.startswith("Etymology"):
posdepth += 1
elif section.heading in Parts:
#p = Parts[section.heading]
#if p:
# r += " "*(depth-1) + word + " (" + p + ")\n\n"
r += " "*(depth-1) + section.heading + "\n\n"
else:
print >>errors, "Unknown part: (%s) %s" % (word, section.heading)
return ""
elif depth > posdepth:
return ""
elif section.heading:
r += " "*(depth-1) + section.heading + "\n\n"
if section.body:
r += dewiki(section.body, depth+1)+"\n"
#r += "".join(" "*depth + x + "\n" for x in dewiki(section.lines))
#if len(section.lines) > 0:
# r += "\n"
for c in section.children:
r += f(depth+1, posdepth, c)
return r
s = f(0, 3, doc)
s += "Ref: http://en.wiktionary.org/wiki/%s\n" % word
return s
def formatBrief(word, doc):
def f(depth, posdepth, section):
if depth == posdepth:
h = section.heading
if not section.heading or section.heading.startswith("Etymology"):
posdepth += 1
elif section.heading in Parts:
#h = Parts[section.heading]
#if h:
# h = "%s (%s)" % (word, h)
pass
stack.append([h, False])
elif depth > 0:
stack.append([section.heading, False])
else:
stack.append(["%h " + section.heading, False])
r = ""
#if section.heading:
# r += " "*(depth-1) + section.heading + "\n"
body = ''.join(x+"\n" for x in section.body.split("\n") if len(x) > 0 and x[0] == '#')
if len(body) > 0:
for i in range(len(stack)):
if not stack[i][1]:
if stack[i][0]:
r += " "*(i-1) + stack[i][0] + "\n"
stack[i][1] = True
r += dewiki(body, depth+1)
for c in section.children:
r += f(depth+1, posdepth, c)
stack.pop()
return r
stack = []
s = f(0, 3, doc)
s += "Ref: http://en.wiktionary.org/wiki/%s\n" % word
return s
class WikiHandler(xml.sax.ContentHandler):
    """SAX handler skeleton for MediaWiki XML dumps (Python 2).

    Subclasses override checkPage() to select pages by title and
    doPage() to process the accumulated wikitext of a selected page.
    """
    def __init__(self):
        self.element = None  # name of the most recently opened element
        self.page = None     # title of the currently selected page, if any
        self.text = ""       # accumulated <text> content for self.page
        self.long = {}       # titles already flagged as unusually long

    def startElement(self, name, attrs):
        #print "start", name, attrs
        self.element = name

    def endElement(self, name):
        #print "end", name
        if self.element == "text":
            if self.page:
                if self.page in self.long:
                    # Diagnostic output for pages over the size threshold.
                    print self.page, len(self.text)
                    print
                self.doPage(self.page, self.text)
                self.page = None
            self.text = ""
        self.element = None

    def characters(self, content):
        #print "characters", content
        if self.element == "title":
            if self.checkPage(content):
                self.page = content
        elif self.element == "text":
            if self.page:
                self.text += content
                if len(self.text) > 100000 and self.page not in self.long:
                    self.long[self.page] = 1

    def checkPage(self, page):
        # Override: return True for pages the subclass wants to process.
        return False

    def doPage(self, page, text):
        # Override: handle one selected page's wikitext.
        pass
class TemplateHandler(WikiHandler):
def checkPage(self, page):
return page.startswith("Template:")
def doPage(self, page, text):
Templates[page[page.find(':')+1:].lower()] = text
class WordHandler(WikiHandler):
def checkPage(self, page):
return ':' not in page
def doPage(self, page, text):
m = re.match(r"#redirect\s*\[\[(.*?)\]\]", text, re.IGNORECASE)
if m:
out.write(" See <%s>" % page)
return
doc = parse(page, text)
out.write(formatBrief(page, doc))
#print formatBrief(page, doc)
fn = sys.argv[1]
info = """ This file was converted from the original database on:
%s
The original data is available from:
http://en.wiktionary.org
The version from which this file was generated was:
%s
Wiktionary is available under the GNU Free Documentation License.
""" % (time.ctime(), os.path.basename(fn))
errors = codecs.open("mkdict.err", "w", "utf_8")
e = codecs.getencoder("utf_8")
Templates = {}
f = os.popen("bunzip2 -c %s" % fn, "r")
xml.sax.parse(f, TemplateHandler())
f.close()
f = os.popen("bunzip2 -c %s" % fn, "r")
out = codecs.getwriter("utf_8")(
os.popen("dictfmt -p wiktionary-en --locale en_US.UTF-8 --columns 0 -u http://en.wiktionary.org", "w"))
out.write(("%%h English Wiktionary\n%s" % info).encode('utf-8'))
xml.sax.parse(f, WordHandler())
f.close()
out.close()

View file

@ -0,0 +1,319 @@
#!/usr/bin/env python
#Copyright 2007 Sebastian Hagen
# This file is part of wordnet_tools.
# wordnet_tools is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2
# as published by the Free Software Foundation
# wordnet_tools is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with wordnet_tools; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
# This program requires python >= 2.4.
# This program converts wordnet index/data file pairs into dict index/data
# files usable by dictd.
# This is basically a reimplementation of the wnfilter program by Rik Faith,
# which unfortunately doesn't work correctly for wordnet files in the newer
# formats. This version of wordnet_structures whould parse wordnet 2.1 files
# correctly, and create output very similar to what wnfilter would have
# written.
import datetime
from textwrap import TextWrapper
CAT_ADJECTIVE = 0
CAT_ADVERB = 1
CAT_NOUN = 2
CAT_VERB = 3
category_map = {
'n': CAT_NOUN,
'v': CAT_VERB,
'a': CAT_ADJECTIVE,
's': CAT_ADJECTIVE,
'r': CAT_ADVERB
}
class WordIndex:
    """One entry of a wordnet index.<pos> file: a lemma in one syntactic
    category, the pointer symbols it uses, and the synsets it appears in."""

    def __init__(self, lemma, category, ptrs, synsets, tagsense_count):
        self.lemma = lemma
        self.category = category
        self.ptrs = ptrs
        self.synsets = synsets
        self.tagsense_count = tagsense_count

    @classmethod
    def build_from_line(cls, line_data, synset_map):
        """Parse one index line, resolving synset offsets through synset_map.

        Index line layout (wndb format): lemma pos synset_cnt p_cnt
        [ptr_symbol...] sense_cnt tagsense_cnt synset_offset...
        """
        line_split = line_data.split()
        lemma = line_split[0]
        category = category_map[line_split[1]]
        synset_count = int(line_split[2],10)
        ptr_count = int(line_split[3],10)
        # Fixed off-by-one: pointer symbols start after the p_cnt field at
        # index 4; range(3, 3+ptr_count) wrongly included the count itself
        # and dropped the last symbol.
        ptrs = [line_split[i] for i in range(4, 4+ptr_count)]
        tagsense_count = int(line_split[5 + ptr_count],10)
        synsets = [synset_map[int(line_split[i],10)] for i in range(6 + ptr_count, 6 + ptr_count + synset_count)]
        return cls(lemma, category, ptrs, synsets, tagsense_count)

    @classmethod
    def build_from_file(cls, f, synset_map, rv_base=None):
        """Parse an entire index file into {lowercased lemma: [WordIndex]},
        optionally extending an existing dict passed as rv_base."""
        if (rv_base is None):
            rv = {}
        else:
            rv = rv_base
        for line in f:
            if (line.startswith(' ')):
                # Indented lines are the license/comment header; skip them.
                continue
            wi = cls.build_from_line(line, synset_map)
            word = wi.lemma.lower()
            if not (word in rv):
                rv[word] = []
            rv[word].append(wi)
        return rv

    def __repr__(self):
        return '%s%s' % (self.__class__.__name__, (self.lemma, self.category, self.ptrs, self.synsets, self.tagsense_count))
class WordIndexDictFormatter(WordIndex):
category_map_rev = {
CAT_NOUN: 'n',
CAT_VERB: 'v',
CAT_ADJECTIVE: 'adj',
CAT_ADVERB: 'adv'
}
linesep = '\n'
LINE_WIDTH_MAX = 68
prefix_fmtf_line_first = '%5s 1: '
prefix_fmtn_line_first = ' '
prefix_fmtf_line_nonfirst = '%5d: '
prefix_fmtn_line_nonfirst = ' '
def dict_str(self):
tw = TextWrapper(width=self.LINE_WIDTH_MAX,
initial_indent=(self.prefix_fmtf_line_first % self.category_map_rev[self.category]),
subsequent_indent=self.prefix_fmtn_line_first)
lines = (tw.wrap(self.synsets[0].dict_str()))
i = 2
for synset in self.synsets[1:]:
tw = TextWrapper(width=self.LINE_WIDTH_MAX,
initial_indent=(self.prefix_fmtf_line_nonfirst % i),
subsequent_indent=self.prefix_fmtn_line_nonfirst)
lines.extend(tw.wrap(synset.dict_str()))
i += 1
return self.linesep.join(lines)
class Synset:
def __init__(self, offset, ss_type, words, ptrs, gloss, frames=()):
self.offset = offset
self.type = ss_type
self.words = words
self.ptrs = ptrs
self.gloss = gloss
self.frames = frames
self.comments = []
@classmethod
def build_from_line(cls, line_data):
line_split = line_data.split()
synset_offset = int(line_split[0],10)
ss_type = category_map[line_split[2]]
word_count = int(line_split[3],16)
words = [line_split[i] for i in range(4, 4 + word_count*2,2)]
ptr_count = int(line_split[4 + word_count*2],10)
ptrs = [(line_split[i], line_split[i+1], line_split[i+2], line_split[i+3]) for i in range(5 + word_count*2,4 + word_count*2 + ptr_count*4,4)]
tok = line_split[5 + word_count*2 + ptr_count*4]
base = 6 + word_count*2 + ptr_count*4
if (tok != '|'):
frame_count = int(tok, 10)
frames = [(int(line_split[i+1],10), int(line_split[i+2],16)) for i in range(base, base + frame_count*3, 3)]
base += frame_count*3 + 1
else:
frames = []
line_split2 = line_data.split(None, base)
if (len(line_split2) < base):
gloss = None
else:
gloss = line_split2[-1]
return cls(synset_offset, ss_type, words, ptrs, gloss, frames)
@classmethod
def build_from_file(cls, f):
rv = {}
comments = []
for line in f:
if (line.startswith(' ')):
line_s = line.lstrip().rstrip('\n')
line_elements = line_s.split(None,1)
try:
int(line_elements[0])
except ValueError:
continue
if (len(line_elements) == 1):
line_elements.append('')
comments.append(line_elements[1])
continue
synset = cls.build_from_line(line.rstrip())
rv[synset.offset] = synset
return (rv, comments)
def dict_str(self):
rv = self.gloss
if (len(self.words) > 1):
rv += ' [syn: %s]' % (', '.join([('{%s}' % word) for word in self.words]))
return rv
def __repr__(self):
return '%s%s' % (self.__class__.__name__, (self.offset, self.type, self.words, self.ptrs, self.gloss, self.frames))
class WordnetDict:
    """Collects WordNet entries and writes them out as a dictd database.

    Parsed index/data file pairs are fed in through wn_dict_add();
    dict_generate() then emits the dictd-format .index / .dict file pair.
    """

    # %-template for the 00-database-info pseudo-headword; filled from
    # local variables in dict_generate() via vars().
    db_info_fmt = '''This file was converted from the original database on:
%(conversion_datetime)s
The original data is available from:
%(wn_url)s
The original data was distributed with the notice shown below. No
additional restrictions are claimed. Please redistribute this changed
version under the same conditions and restriction that apply to the
original version.\n\n
%(wn_license)s'''
    # Timestamp format used in the 00-database-info entry.
    datetime_fmt = '%Y-%m-%dT%H:%M:%S'
    # Base64 alphabet used by dictd index files for offsets/lengths.
    base64_map = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/'

    def __init__(self, wn_url, desc_short, desc_long):
        """Set up an empty database carrying the given descriptive metadata."""
        self.word_data = {}    # headword -> list of word-index entries
        self.wn_url = wn_url
        self.desc_short = desc_short
        self.desc_long = desc_long
        self.wn_license = None # filled in by wn_dict_add()

    def wn_dict_add(self, file_index, file_data):
        """Parse one WordNet index/data file pair into this database."""
        file_data.seek(0)
        file_index.seek(0)
        (synsets, license_lines) = Synset.build_from_file(file_data)
        WordIndexDictFormatter.build_from_file(file_index, synsets, self.word_data)
        if (license_lines):
            self.wn_license = '\n'.join(license_lines) + '\n'

    @classmethod
    def base64_encode(cls, i):
        """Encode a non-negative integer into a dictd compatible base64 string"""
        if (i < 0):
            raise ValueError('Value %r for i is negative' % (i,))
        # Determine how many base64 digits are needed to represent i.
        r = 63
        e = 1
        while (r < i):
            e += 1
            r = 64**e - 1
        rv = ''
        while (e > 0):
            e -= 1
            # Explicit floor division: keeps the digit an int on both
            # Python 2 and Python 3 (plain '/' yields a float on 3,
            # which is not a valid string index).
            d = i // 64**e
            rv += cls.base64_map[d]
            i = i % (64**e)
        return rv

    @classmethod
    def dict_entry_write(cls, file_index, file_data, key, entry, linesep='\n'):
        """Write a single dict entry for <key> to index and data files"""
        entry_start = file_data.tell()
        file_data.write(entry)
        entry_len = len(entry)
        # Index line: headword, base64 data offset, base64 data length.
        file_index.write('%s\t%s\t%s%s' % (key, cls.base64_encode(entry_start),
            cls.base64_encode(entry_len), linesep))

    def dict_generate(self, file_index, file_data):
        """Write the complete dictd index/data pair for all stored words."""
        file_index.seek(0)
        file_data.seek(0)
        # The dictd file format is fairly iffy on the subject of special
        # headwords: either dictd is buggy, or the manpage doesn't tell the whole
        # story about the format.
        # The upshot is that order of these entries in the index *matters*.
        # Putting them at the beginning and in alphabetic order is afaict ok.
        # Some other orders completely and quietly break the ability to look
        # those headwords up.
        # -- problem encountered with 1.10.2, at 2007-08-05.
        file_data.write('\n')
        # These locals are referenced by name in db_info_fmt via vars().
        wn_url = self.wn_url
        conversion_datetime = datetime.datetime.now().strftime(self.datetime_fmt)
        wn_license = self.wn_license
        self.dict_entry_write(file_index, file_data, '00-database-info', '00-database-info\n%s\n' % (self.db_info_fmt % vars()))
        self.dict_entry_write(file_index, file_data, '00-database-long', '00-database-long\n%s\n' % self.desc_long)
        self.dict_entry_write(file_index, file_data, '00-database-short', '00-database-short\n%s\n' % self.desc_short)
        self.dict_entry_write(file_index, file_data, '00-database-url', '00-database-url\n%s\n' % self.wn_url)
        # sorted() works on Python 2 and 3 alike; dict.keys().sort() is
        # Python-2-only (keys() is a view, not a list, on Python 3).
        words = sorted(self.word_data)
        for word in words:
            for wi in self.word_data[word]:
                word_cs = word
                # Use case-sensitivity information of first entry of first synset that
                # matches this word case-insensitively
                for synset in wi.synsets:
                    for ss_word in synset.words:
                        if (ss_word.lower() == word_cs.lower()):
                            word_cs = ss_word
                            break
                    else:
                        continue
                    break
                else:
                    continue
                break
            outstr = ''
            for wi in self.word_data[word]:
                outstr += wi.dict_str() + '\n'
            outstr = '%s%s%s' % (word_cs, wi.linesep, outstr)
            self.dict_entry_write(file_index, file_data, word_cs, outstr, wi.linesep)
        file_index.truncate()
        file_data.truncate()
if (__name__ == '__main__'):
    # Command-line driver: convert one or more WordNet index/data file
    # pairs into a single dictd index/data pair.
    import optparse
    op = optparse.OptionParser(usage='usage: %prog [options] (<wn_index_file> <wn_data_file>)+')
    op.add_option('-i', '--outindex', dest='oi', default='wn.index', help='filename of index file to write to')
    op.add_option('-d', '--outdata', dest='od', default='wn.dict', help='filename of data file to write to')
    op.add_option('--wn_url', dest='wn_url', default='ftp://ftp.cogsci.princeton.edu/pub/wordnet/2.0', help='URL for wordnet sources')
    op.add_option('--db_desc_short', dest='desc_short', default=' WordNet (r) 2.1 (2005)', help='short dict DB description')
    op.add_option('--db_desc_long', dest='desc_long', default=' WordNet (r): A Lexical Database for English from the\n Cognitive Science Laboratory at Princeton University', help='long dict DB description')
    (options, args) = op.parse_args()
    # Inputs come in (index, data) pairs; reject an odd count up front
    # instead of dying with an IndexError halfway through the loop.
    if len(args) % 2 != 0:
        op.error('input files must be given as <wn_index_file> <wn_data_file> pairs')
    wnd = WordnetDict(wn_url=options.wn_url, desc_short=options.desc_short, desc_long=options.desc_long)
    for i in range(0, len(args), 2):
        # open() instead of the Python-2-only file() builtin; single
        # parenthesized print arguments behave the same on Python 2 and 3.
        print('Opening index file %r...' % args[i])
        file_index = open(args[i])
        print('Opening data file %r...' % args[i+1])
        file_data = open(args[i+1])
        print('Parsing index file and data file...')
        wnd.wn_dict_add(file_index, file_data)
    print('All input files parsed. Writing output to index file %r and data file %r.' % (options.oi, options.od))
    wnd.dict_generate(open(options.oi, 'w'), open(options.od, 'w'))
    print('All done.')

View file

@ -0,0 +1,27 @@
{ lib, stdenv, fetchurl, munge, lua,
  libcap, perl, ncurses
}:

stdenv.mkDerivation rec {
  pname = "diod";
  version = "1.0.24";

  src = fetchurl {
    url = "https://github.com/chaos/diod/releases/download/${version}/${pname}-${version}.tar.gz";
    sha256 = "17wckwfsqj61yixz53nwkc35z66arb1x3napahpi64m7q68jn7gl";
  };

  # Header fixups, presumably for modern glibc:
  # - swap the removed attr/xattr.h for the libc-provided sys/xattr.h;
  # - pull in sys/sysmacros.h explicitly after sys/types.h in diod/ops.c
  #   (major()/minor() are no longer exposed via sys/types.h).
  postPatch = ''
    substituteInPlace diod/xattr.c --replace attr/xattr.h sys/xattr.h
    sed -i -e '/sys\/types\.h>/a #include <sys/sysmacros.h>' diod/ops.c
  '';

  buildInputs = [ munge lua libcap perl ncurses ];

  meta = with lib; {
    description = "An I/O forwarding server that implements a variant of the 9P protocol";
    maintainers = with maintainers; [ rnhmjoj ];
    platforms = platforms.linux;
    license = licenses.gpl2Plus;
  };
}

View file

@ -0,0 +1,86 @@
{ config, stdenv, lib, fetchurl, fetchpatch
, perl, pkg-config
, libcap, libtool, libxml2, openssl, libuv, nghttp2, jemalloc
, enableGSSAPI ? true, libkrb5
, enablePython ? false, python3
, enableSeccomp ? false, libseccomp
, buildPackages, nixosTests
}:

stdenv.mkDerivation rec {
  pname = "bind";
  version = "9.18.3";

  src = fetchurl {
    url = "https://downloads.isc.org/isc/bind9/${version}/${pname}-${version}.tar.xz";
    sha256 = "sha256-CtjadzvZPLoO9mzIGZlpjr35w+UfrtXlyMHrdcrSrm8=";
  };

  # Client utilities are split out of the default output ("dnsutils",
  # "host") to keep the server closure small.
  outputs = [ "out" "lib" "dev" "man" "dnsutils" "host" ];

  patches = [
    # Strip the recorded ./configure arguments from the binaries so they
    # don't retain references to build-time store paths.
    ./dont-keep-configure-flags.patch
  ];

  nativeBuildInputs = [ perl pkg-config ];
  buildInputs = [ libtool libxml2 openssl libuv nghttp2 jemalloc ]
    ++ lib.optional stdenv.isLinux libcap
    ++ lib.optional enableSeccomp libseccomp
    ++ lib.optional enableGSSAPI libkrb5
    ++ lib.optional enablePython (python3.withPackages (ps: with ps; [ ply ]));

  depsBuildBuild = [ buildPackages.stdenv.cc ];

  configureFlags = [
    "--localstatedir=/var"
    "--with-libtool"
    (if enablePython then "--with-python" else "--without-python")
    "--without-atf"
    "--without-dlopen"
    "--without-docbook-xsl"
    "--without-idn"
    "--without-idnlib"
    "--without-lmdb"
    "--without-libjson"
    "--without-pkcs11"
    "--without-purify"
    "--with-randomdev=/dev/random"
    "--with-ecdsa"
    "--with-gost"
    "--without-eddsa"
    "--with-aes"
  ] ++ lib.optional stdenv.isLinux "--with-libcap=${libcap.dev}"
    ++ lib.optional enableSeccomp "--enable-seccomp"
    ++ lib.optional enableGSSAPI "--with-gssapi=${libkrb5.dev}/bin/krb5-config"
    ++ lib.optional (stdenv.hostPlatform != stdenv.buildPlatform) "BUILD_CC=$(CC_FOR_BUILD)";

  postInstall = ''
    moveToOutput bin/bind9-config $dev
    moveToOutput bin/host $host
    moveToOutput bin/dig $dnsutils
    moveToOutput bin/delv $dnsutils
    moveToOutput bin/nslookup $dnsutils
    moveToOutput bin/nsupdate $dnsutils
    # Rewrite references to openssl.dev into the runtime openssl output so
    # libtool archives and the *-config scripts don't drag in the dev output.
    for f in "$lib/lib/"*.la "$dev/bin/"bind*-config; do
      sed -i "$f" -e 's|-L${openssl.dev}|-L${lib.getLib openssl}|g'
    done
  '';

  doCheck = false; # requires root and the net

  passthru.tests = { inherit (nixosTests) bind; };

  meta = with lib; {
    homepage = "https://www.isc.org/bind/";
    description = "Domain name server";
    license = licenses.mpl20;
    changelog = "https://downloads.isc.org/isc/bind9/cur/${lib.versions.majorMinor version}/CHANGES";
    maintainers = with maintainers; [ globin ];
    platforms = platforms.unix;
    outputsToInstall = [ "out" "dnsutils" "host" ];
  };
}

View file

@ -0,0 +1,40 @@
diff --git a/bin/named/include/named/globals.h b/bin/named/include/named/globals.h
index 82b632ef04..dedfd4d33b 100644
--- a/bin/named/include/named/globals.h
+++ b/bin/named/include/named/globals.h
@@ -69,7 +69,9 @@ EXTERN const char *named_g_version INIT(PACKAGE_VERSION);
EXTERN const char *named_g_product INIT(PACKAGE_NAME);
EXTERN const char *named_g_description INIT(PACKAGE_DESCRIPTION);
EXTERN const char *named_g_srcid INIT(PACKAGE_SRCID);
+#if 0
EXTERN const char *named_g_configargs INIT(PACKAGE_CONFIGARGS);
+#endif
EXTERN const char *named_g_builder INIT(PACKAGE_BUILDER);
EXTERN in_port_t named_g_port INIT(0);
EXTERN in_port_t named_g_tlsport INIT(0);
diff --git a/bin/named/main.c b/bin/named/main.c
index 9ad2d0e277..9729a2b3fc 100644
--- a/bin/named/main.c
+++ b/bin/named/main.c
@@ -481,7 +481,9 @@ printversion(bool verbose) {
}
printf("running on %s\n", named_os_uname());
+#if 0
printf("built by %s with %s\n", PACKAGE_BUILDER, PACKAGE_CONFIGARGS);
+#endif
#ifdef __clang__
printf("compiled by CLANG %s\n", __VERSION__);
#else /* ifdef __clang__ */
@@ -1027,9 +1029,11 @@ setup(void) {
NAMED_LOGMODULE_MAIN, ISC_LOG_NOTICE, "running on %s",
named_os_uname());
+#if 0
isc_log_write(named_g_lctx, NAMED_LOGCATEGORY_GENERAL,
NAMED_LOGMODULE_MAIN, ISC_LOG_NOTICE, "built with %s",
PACKAGE_CONFIGARGS);
+#endif
isc_log_write(named_g_lctx, NAMED_LOGCATEGORY_GENERAL,
NAMED_LOGMODULE_MAIN, ISC_LOG_NOTICE,

View file

@ -0,0 +1,38 @@
{ lib
, stdenv
, buildGoModule
, fetchFromGitHub
}:

buildGoModule rec {
  pname = "coredns";
  version = "1.9.2";

  src = fetchFromGitHub {
    owner = "coredns";
    repo = "coredns";
    rev = "v${version}";
    sha256 = "sha256-6ABcXRuPEkzhjVZcltPoWGAc+fs6FwmgQCMRuLmhXxo=";
  };

  vendorSha256 = "sha256-0S77748voNlIuY6yUAa669pB09h35THojCyQKUm5VFc=";

  # Disable tests that need network access by renaming them so the Go
  # test runner no longer picks them up (Test* -> Skip*).
  postPatch = ''
    substituteInPlace test/file_cname_proxy_test.go \
      --replace "TestZoneExternalCNAMELookupWithProxy" \
                "SkipZoneExternalCNAMELookupWithProxy"

    substituteInPlace test/readme_test.go \
      --replace "TestReadme" "SkipReadme"
  '' + lib.optionalString stdenv.isDarwin ''
    # loopback interface is lo0 on macos
    sed -E -i 's/\blo\b/lo0/' plugin/bind/setup_test.go
  '';

  meta = with lib; {
    homepage = "https://coredns.io";
    description = "A DNS server that runs middleware";
    license = licenses.asl20;
    maintainers = with maintainers; [ rushmorem rtreffer deltaevo superherointj ];
  };
}

View file

@ -0,0 +1,52 @@
{ lib, stdenv, fetchurl, pkg-config, systemd
, boost, libsodium, libedit, re2
, net-snmp, lua, protobuf, openssl, zlib, h2o
, nghttp2, nixosTests
}:

stdenv.mkDerivation rec {
  pname = "dnsdist";
  version = "1.7.0";

  src = fetchurl {
    url = "https://downloads.powerdns.com/releases/dnsdist-${version}.tar.bz2";
    sha256 = "sha256-eMxyywzPf7Xz8vrgnHntplpSVjdNoJu1Qbc16mho/GQ=";
  };

  patches = [
    # Disable tests requiring networking:
    # "Error connecting to new server with address 192.0.2.1:53: connecting socket to 192.0.2.1:53: Network is unreachable"
    ./disable-network-tests.patch
  ];

  nativeBuildInputs = [ pkg-config protobuf ];
  buildInputs = [ systemd boost libsodium libedit re2 net-snmp lua openssl zlib h2o nghttp2 ];

  # Enable DNSCrypt, DoT and DoH support alongside the unit tests.
  configureFlags = [
    "--with-libsodium"
    "--with-re2"
    "--enable-dnscrypt"
    "--enable-dns-over-tls"
    "--enable-dns-over-https"
    "--with-protobuf=yes"
    "--with-net-snmp"
    "--disable-dependency-tracking"
    "--enable-unit-tests"
    "--enable-systemd"
  ];

  doCheck = true;

  enableParallelBuilding = true;

  passthru.tests = {
    inherit (nixosTests) dnsdist;
  };

  meta = with lib; {
    description = "DNS Loadbalancer";
    homepage = "https://dnsdist.org";
    license = licenses.gpl2;
    maintainers = with maintainers; [ jojosch ];
  };
}

View file

@ -0,0 +1,28 @@
diff --git a/test-dnsdisttcp_cc.cc b/test-dnsdisttcp_cc.cc
index 1fbb00e..dc04137 100644
--- a/test-dnsdisttcp_cc.cc
+++ b/test-dnsdisttcp_cc.cc
@@ -848,6 +848,7 @@ BOOST_AUTO_TEST_CASE(test_IncomingConnectionWithProxyProtocol_SelfAnswered)
BOOST_AUTO_TEST_CASE(test_IncomingConnection_BackendNoOOOR)
{
+ return;
auto local = getBackendAddress("1", 80);
ClientState localCS(local, true, false, false, "", {});
auto tlsCtx = std::make_shared<MockupTLSCtx>();
@@ -1711,6 +1712,7 @@ BOOST_AUTO_TEST_CASE(test_IncomingConnection_BackendNoOOOR)
BOOST_AUTO_TEST_CASE(test_IncomingConnectionOOOR_BackendOOOR)
{
+ return;
auto local = getBackendAddress("1", 80);
ClientState localCS(local, true, false, false, "", {});
/* enable out-of-order on the front side */
@@ -3677,6 +3679,7 @@ BOOST_AUTO_TEST_CASE(test_IncomingConnectionOOOR_BackendOOOR)
BOOST_AUTO_TEST_CASE(test_IncomingConnectionOOOR_BackendNotOOOR)
{
+ return;
auto local = getBackendAddress("1", 80);
ClientState localCS(local, true, false, false, "", {});
/* enable out-of-order on the front side */

View file

@ -0,0 +1,26 @@
{ lib, rustPlatform, fetchCrate, stdenv, Security, libiconv, nixosTests }:

rustPlatform.buildRustPackage rec {
  pname = "doh-proxy-rust";
  version = "0.9.2";

  # Fetched from crates.io; the crate name differs from the Nix pname.
  src = fetchCrate {
    inherit version;
    crateName = "doh-proxy";
    sha256 = "sha256-/637lR6OycVOOUVe29uFR1LtYIoFJ6gslDV9uAGkU1A=";
  };

  cargoSha256 = "sha256-tadTyWSuknAjosv7AvZF0/8FlHL/zcFT5LDW1KcMeHI=";

  # Darwin needs the Security framework and libiconv at link time.
  buildInputs = lib.optionals stdenv.isDarwin [ Security libiconv ];

  passthru.tests = { inherit (nixosTests) doh-proxy-rust; };

  meta = with lib; {
    homepage = "https://github.com/jedisct1/doh-server";
    description = "Fast, mature, secure DoH server proxy written in Rust";
    license = with licenses; [ mit ];
    maintainers = with maintainers; [ stephank ];
    mainProgram = "doh-proxy";
  };
}

View file

@ -0,0 +1,34 @@
{ lib, stdenv, fetchFromGitHub, cmake, gtest, c-ares, curl, libev }:

stdenv.mkDerivation rec {
  pname = "https-dns-proxy";
  # there are no stable releases (yet?)
  version = "unstable-2021-03-29";

  src = fetchFromGitHub {
    owner = "aarond10";
    repo = "https_dns_proxy";
    rev = "bbd9ef272dcda3ead515871f594768af13192af7";
    sha256 = "sha256-r+IpDklI3vITK8ZlZvIFm3JdDe2r8DK2ND3n1a/ThrM=";
  };

  nativeBuildInputs = [ cmake gtest ];
  buildInputs = [ c-ares curl libev ];

  # Upstream's CMake install target is not used; install the binary and
  # docs by hand (docs live one level up from the cmake build dir).
  installPhase = ''
    install -Dm555 -t $out/bin https_dns_proxy
    install -Dm444 -t $out/share/doc/${pname} ../{LICENSE,README}.*
  '';

  # upstream wants to add tests and the gtest framework is in place, so be ready
  # for when that happens despite there being none as of right now
  doCheck = true;

  meta = with lib; {
    description = "DNS to DNS over HTTPS (DoH) proxy";
    license = licenses.mit;
    maintainers = with maintainers; [ peterhoeg ];
    platforms = platforms.linux;
  };
}

View file

@ -0,0 +1,69 @@
{ lib, stdenv, fetchurl, pkg-config, gnutls, liburcu, lmdb, libcap_ng, libidn2, libunistring
, systemd, nettle, libedit, zlib, libiconv, libintl, libmaxminddb, libbpf, nghttp2, libmnl
, autoreconfHook, nixosTests, knot-resolver
}:

stdenv.mkDerivation rec {
  pname = "knot-dns";
  version = "3.1.8";

  src = fetchurl {
    url = "https://secure.nic.cz/files/knot-dns/knot-${version}.tar.xz";
    sha256 = "767e458a56277a1270b359294c3be6c63fd734884d62a045e01756a46507aa94";
  };

  outputs = [ "bin" "out" "dev" ];

  # Runtime directories are provided by the NixOS module, not the package.
  configureFlags = [
    "--with-configdir=/etc/knot"
    "--with-rundir=/run/knot"
    "--with-storage=/var/lib/knot"
  ];

  patches = [
    # Don't try to create directories like /var/lib/knot at build time.
    # They are later created from NixOS itself.
    ./dont-create-run-time-dirs.patch
    ./runtime-deps.patch
  ];

  nativeBuildInputs = [ pkg-config autoreconfHook ];
  buildInputs = [
    gnutls liburcu libidn2 libunistring
    nettle libedit
    libiconv lmdb libintl
    nghttp2 # DoH support in kdig
    libmaxminddb # optional for geoip module (it's tiny)
    # without sphinx &al. for developer documentation
    # TODO: add dnstap support?
  ] ++ lib.optionals stdenv.isLinux [
    libcap_ng systemd
    libbpf libmnl # XDP support (it's Linux kernel API)
  ] ++ lib.optional stdenv.isDarwin zlib; # perhaps due to gnutls

  enableParallelBuilding = true;

  CFLAGS = [ "-O2" "-DNDEBUG" ];

  doCheck = true;
  checkFlags = "V=1"; # verbose output in case some test fails
  doInstallCheck = true;

  # Drop libtool archives; they are not needed by consumers.
  postInstall = ''
    rm -r "$out"/lib/*.la
  '';

  passthru.tests = {
    inherit knot-resolver;
  } // lib.optionalAttrs stdenv.isLinux {
    inherit (nixosTests) knot;
  };

  meta = with lib; {
    description = "Authoritative-only DNS server from .cz domain registry";
    homepage = "https://knot-dns.cz";
    license = licenses.gpl3Plus;
    platforms = platforms.unix;
    maintainers = [ maintainers.vcunat ];
  };
}

View file

@ -0,0 +1,32 @@
diff --git a/samples/Makefile.am b/samples/Makefile.am
index c253c91..107401d 100644
--- a/samples/Makefile.am
+++ b/samples/Makefile.am
@@ -19,11 +19,6 @@ EXTRA_DIST = knot.sample.conf.in example.com.zone
if HAVE_DAEMON
-install-data-local: knot.sample.conf
- if [ \! -f $(DESTDIR)/$(config_dir)/knot.sample.conf ]; then \
- $(INSTALL) -d $(DESTDIR)/$(config_dir); \
- $(INSTALL_DATA) knot.sample.conf $(srcdir)/example.com.zone $(DESTDIR)/$(config_dir); \
- fi
uninstall-local:
-rm -rf $(DESTDIR)/$(config_dir)/knot.sample.conf \
$(DESTDIR)/$(config_dir)/example.com.zone
diff --git a/src/utils/Makefile.inc b/src/utils/Makefile.inc
index e6765d9..d859d23 100644
--- a/src/utils/Makefile.inc
+++ b/src/utils/Makefile.inc
@@ -79,11 +79,6 @@ endif HAVE_DNSTAP
endif HAVE_UTILS
if HAVE_DAEMON
-# Create storage and run-time directories
-install-data-hook:
- $(INSTALL) -d $(DESTDIR)/@config_dir@
- $(INSTALL) -d $(DESTDIR)/@run_dir@
- $(INSTALL) -d $(DESTDIR)/@storage_dir@
sbin_PROGRAMS = knotc knotd

View file

@ -0,0 +1,14 @@
Remove unnecessary runtime dependencies.
`knotc status configure` shows summary from the configure script,
but that contains also references like include paths.
Filter these at least in a crude way (whole lines).
--- a/configure.ac
+++ b/configure.ac
@@ -766,5 +766,5 @@ result_msg_base=" Knot DNS $VERSION
-result_msg_esc=$(echo -n "$result_msg_base" | sed '$!s/$/\\n/' | tr -d '\n')
+result_msg_esc=$(echo -n "$result_msg_base" | grep -Fv "$NIX_STORE" | sed '$!s/$/\\n/' | tr -d '\n')
AC_DEFINE_UNQUOTED([CONFIGURE_SUMMARY],["$result_msg_esc"],[Configure summary])

View file

@ -0,0 +1,124 @@
{ lib, stdenv, fetchurl
# native deps.
, runCommand, pkg-config, meson, ninja, makeWrapper
# build+runtime deps.
, knot-dns, luajitPackages, libuv, gnutls, lmdb
, systemd, libcap_ng, dns-root-data, nghttp2 # optionals, in principle
# test-only deps.
, cmocka, which, cacert
, extraFeatures ? false /* catch-all if defaults aren't enough */
}:

let # un-indented, over the whole file

# When extraFeatures is requested, expose the Lua-wrapped variant that
# bundles the http module's dependencies; otherwise the plain build.
result = if extraFeatures then wrapped-full else unwrapped;

inherit (lib) optional optionals optionalString;
lua = luajitPackages;

unwrapped = stdenv.mkDerivation rec {
  pname = "knot-resolver";
  version = "5.5.0";

  src = fetchurl {
    url = "https://secure.nic.cz/files/knot-resolver/${pname}-${version}.tar.xz";
    sha256 = "4e6f48c74d955f143d603f6072670cb41ab9acdd95d4455d6e74b6908562c55a";
  };

  outputs = [ "out" "dev" ];

  # Path fixups for the NixOS service.
  postPatch = ''
    patch meson.build <<EOF
    @@ -50,2 +50,2 @@
    -systemd_work_dir = prefix / get_option('localstatedir') / 'lib' / 'knot-resolver'
    -systemd_cache_dir = prefix / get_option('localstatedir') / 'cache' / 'knot-resolver'
    +systemd_work_dir = '/var/lib/knot-resolver'
    +systemd_cache_dir = '/var/cache/knot-resolver'
    EOF

    # ExecStart can't be overwritten in overrides.
    # We need that to use wrapped executable and correct config file.
    sed '/^ExecStart=/d' -i systemd/kresd@.service.in
  ''
    # some tests have issues with network sandboxing, apparently
    + optionalString doInstallCheck ''
    echo 'os.exit(77)' > daemon/lua/trust_anchors.test/bootstrap.test.lua
    sed -E '/^[[:blank:]]*test_(dstaddr|headers),?$/d' -i \
      tests/config/doh2.test.lua modules/http/http_doh.test.lua
  '';

  preConfigure = ''
    patchShebangs scripts/
  '';

  nativeBuildInputs = [ pkg-config meson ninja ];

  # http://knot-resolver.readthedocs.io/en/latest/build.html#requirements
  buildInputs = [ knot-dns lua.lua libuv gnutls lmdb ]
    ++ optionals stdenv.isLinux [ /*lib*/systemd libcap_ng ]
    ++ [ nghttp2 ]
    ## optional dependencies; TODO: dnstap
    ;

  mesonFlags = [
    "-Dkeyfile_default=${dns-root-data}/root.ds"
    "-Droot_hints=${dns-root-data}/root.hints"
    "-Dinstall_kresd_conf=disabled" # not really useful; examples are inside share/doc/
    "--default-library=static" # not used by anyone
  ]
  ++ optional doInstallCheck "-Dunit_tests=enabled"
  ++ optional (doInstallCheck && !stdenv.isDarwin) "-Dconfig_tests=enabled"
  ++ optional stdenv.isLinux "-Dsystemd_files=enabled" # used by NixOS service
    #"-Dextra_tests=enabled" # not suitable as in-distro tests; many deps, too.
  ;

  postInstall = ''
    rm "$out"/lib/libkres.a
    rm "$out"/lib/knot-resolver/upgrade-4-to-5.lua # not meaningful on NixOS
  '' + optionalString stdenv.targetPlatform.isLinux ''
    rm -r "$out"/lib/sysusers.d/ # ATM more likely to harm than help
  '';

  # Tests can only run natively (no cross execution).
  doInstallCheck = with stdenv; hostPlatform == buildPlatform;
  installCheckInputs = [ cmocka which cacert lua.cqueues lua.basexx lua.http ];
  installCheckPhase = ''
    meson test --print-errorlogs
  '';

  meta = with lib; {
    description = "Caching validating DNS resolver, from .cz domain registry";
    homepage = "https://knot-resolver.cz";
    license = licenses.gpl3Plus;
    platforms = platforms.unix;
    maintainers = [ maintainers.vcunat /* upstream developer */ ];
  };
};

wrapped-full = runCommand unwrapped.name
  {
    nativeBuildInputs = [ makeWrapper ];
    buildInputs = with luajitPackages; [
      # For http module, prefill module, trust anchor bootstrap.
      # It brings lots of deps; some are useful elsewhere (e.g. cqueues).
      http
      # psl isn't in nixpkgs yet, but policy.slice_randomize_psl() seems not important.
    ];
    preferLocalBuild = true;
    allowSubstitutes = false;
  }
  ''
    mkdir -p "$out"/bin
    makeWrapper '${unwrapped}/bin/kresd' "$out"/bin/kresd \
      --set LUA_PATH "$LUA_PATH" \
      --set LUA_CPATH "$LUA_CPATH"

    ln -sr '${unwrapped}/share' "$out"/
    ln -sr '${unwrapped}/lib' "$out"/ # useful in NixOS service
    ln -sr "$out"/{bin,sbin}

    echo "Checking that 'http' module loads, i.e. lua search paths work:"
    echo "modules.load('http')" > test-http.lua
    echo -e 'quit()' | env -i "$out"/bin/kresd -a 127.0.0.1#53535 -c test-http.lua
  '';

in result

View file

@ -0,0 +1,112 @@
{ lib
, buildGoModule
, fetchFromGitHub
, nixosTests
, libcap
}:

let
  # ncdns source
  ncdns = fetchFromGitHub {
    owner = "namecoin";
    repo = "ncdns";
    rev = "2a486311b0fe1a921af34aa3b31e6e4e0569accc";
    sha256 = "01arwlycp1iia4bd3dgyn8dam1av2a7d9hv7f085n14l2i2aza7v";
  };

  # script to patch the crypto/x509 package
  x509 = fetchFromGitHub {
    owner = "namecoin";
    repo = "x509-compressed";
    rev = "fb9f2b7bc9fcba954d70f63857cc0c3841b1cf47";
    sha256 = "1arkbpbzvhcmz5fhjqg34x2jbjnwmlisapk22rjki17qpamh7zks";

    # ncdns must be put in a subdirectory for this to work.
    postFetch = ''
      cp -r --no-preserve=mode "${ncdns}" "$out/ncdns"
    '';
  };
in

buildGoModule {
  pname = "ncdns";
  version = "unstable-2020-07-18";

  src = x509;
  vendorSha256 = "02bqf6vkj5msk35sr5sklnqqd16n7gns7knzqslw077xrxiz7bsg";

  # Override the go-modules fetcher derivation to apply
  # upstream's patch of the crypto/x509 library.
  modBuildPhase = ''
    go mod init github.com/namecoin/x509-compressed
    go generate ./...
    go mod tidy

    cd ncdns
    go mod init github.com/namecoin/ncdns
    go mod edit \
      -replace github.com/coreos/go-systemd=github.com/coreos/go-systemd/v22@latest \
      -replace github.com/namecoin/x509-compressed=$NIX_BUILD_TOP/source
    go mod tidy
  '';

  # Copy over the lockfiles as well, because the source
  # doesn't contain it. The fixed-output derivation is
  # probably not reproducible anyway.
  modInstallPhase = ''
    mv -t vendor go.mod go.sum
    cp -r --reflink=auto vendor "$out"
  '';

  buildInputs = [ libcap ];

  # The fetcher derivation must run with a different
  # $sourceRoot, but buildGoModule doesn't allow that,
  # so we use this ugly hack.
  unpackPhase = ''
    runHook preUnpack
    unpackFile "$src"
    sourceRoot=$PWD/source/ncdns
    chmod -R u+w -- "$sourceRoot"
    cd $sourceRoot
    runHook postUnpack
  '';

  # Same as above: can't use `patches` because that would
  # be also applied to the fetcher derivation, thus failing.
  patchPhase = ''
    runHook prePatch
    patch -p1 < ${./fix-tpl-path.patch}
    runHook postPatch
  '';

  preBuild = ''
    chmod -R u+w vendor
    mv -t . vendor/go.{mod,sum}
  '';

  preCheck = ''
    # needed to run the ncdns test suite
    ln -s $PWD/vendor ../../go/src
  '';

  postInstall = ''
    mkdir -p "$out/share"
    cp -r _doc "$out/share/doc"
    cp -r _tpl "$out/share/tpl"
  '';

  meta = with lib; {
    description = "Namecoin to DNS bridge daemon";
    homepage = "https://github.com/namecoin/ncdns";
    license = licenses.gpl3Plus;
    maintainers = with maintainers; [ rnhmjoj ];
  };

  passthru.tests.ncdns = nixosTests.ncdns;
}

View file

@ -0,0 +1,27 @@
This sets a default value for the tpl directory that works for Nixpkgs.
diff --git a/server/web.go b/server/web.go
index d024a42..0522d02 100644
--- a/server/web.go
+++ b/server/web.go
@@ -10,6 +10,7 @@ import "path/filepath"
import "time"
import "strings"
import "fmt"
+import "os"
var layoutTpl *template.Template
var mainPageTpl *template.Template
@@ -44,7 +45,11 @@ func deriveTemplate(filename string) (*template.Template, error) {
}
func (s *Server) tplFilename(filename string) string {
- td := filepath.Join(s.cfg.ConfigDir, "..", "tpl")
+ ex, err := os.Executable()
+ if err != nil {
+ panic(err)
+ }
+ td := filepath.Join(filepath.Dir(ex), "..", "share", "tpl")
if s.cfg.TplPath != "" {
td = s.cfg.TplPath
}

View file

@ -0,0 +1,66 @@
{ lib, stdenv, fetchurl, libevent, openssl, nixosTests
, bind8Stats ? false
, checking ? false
, ipv6 ? true
, mmap ? false
, minimalResponses ? true
, nsec3 ? true
, ratelimit ? false
, recvmmsg ? false
, rootServer ? false
, rrtypes ? false
, zoneStats ? false
, configFile ? "/etc/nsd/nsd.conf"
}:
stdenv.mkDerivation rec {
pname = "nsd";
version = "4.4.0";
src = fetchurl {
url = "https://www.nlnetlabs.nl/downloads/${pname}/${pname}-${version}.tar.gz";
sha256 = "sha256-z81v3Zk0TKWn73wpQMJBvO9HH8MlK6PcvUxX4GOOiDY=";
};
prePatch = ''
substituteInPlace nsd-control-setup.sh.in --replace openssl ${openssl}/bin/openssl
'';
buildInputs = [ libevent openssl ];
configureFlags =
let edf = c: o: if c then ["--enable-${o}"] else ["--disable-${o}"];
in edf bind8Stats "bind8-stats"
++ edf checking "checking"
++ edf ipv6 "ipv6"
++ edf mmap "mmap"
++ edf minimalResponses "minimal-responses"
++ edf nsec3 "nsec3"
++ edf ratelimit "ratelimit"
++ edf recvmmsg "recvmmsg"
++ edf rootServer "root-server"
++ edf rrtypes "draft-rrtypes"
++ edf zoneStats "zone-stats"
++ [ "--with-ssl=${openssl.dev}"
"--with-libevent=${libevent.dev}"
"--with-nsd_conf_file=${configFile}"
"--with-configdir=etc/nsd"
];
patchPhase = ''
sed 's@$(INSTALL_DATA) nsd.conf.sample $(DESTDIR)$(nsdconfigfile).sample@@g' -i Makefile.in
'';
passthru.tests = {
inherit (nixosTests) nsd;
};
meta = with lib; {
homepage = "http://www.nlnetlabs.nl";
description = "Authoritative only, high performance, simple and open source name server";
license = licenses.bsd3;
platforms = platforms.unix;
maintainers = [ maintainers.hrdinka ];
};
}

View file

@ -0,0 +1,42 @@
{ lib, stdenv, fetchurl, pkg-config, boost, nixosTests
, openssl, systemd, lua, luajit, protobuf
, enableProtoBuf ? false
}:

stdenv.mkDerivation rec {
  pname = "pdns-recursor";
  version = "4.6.2";

  src = fetchurl {
    url = "https://downloads.powerdns.com/releases/pdns-recursor-${version}.tar.bz2";
    sha256 = "sha256-2mSYUHOf3XuvLfZFrMl3UszTkJc7VrjiUXHqew0lrSA=";
  };

  nativeBuildInputs = [ pkg-config ];
  buildInputs = [
    boost openssl systemd
    lua luajit
  ] ++ lib.optional enableProtoBuf protobuf;

  configureFlags = [
    "--enable-reproducible"
    "--enable-systemd"
  ];

  enableParallelBuilding = true;

  passthru.tests = {
    inherit (nixosTests) pdns-recursor ncdns;
  };

  meta = with lib; {
    description = "A recursive DNS server";
    homepage = "https://www.powerdns.com/";
    platforms = platforms.linux;
    badPlatforms = [
      "i686-linux" # a 64-bit time_t is needed
    ];
    license = licenses.gpl2Only;
    maintainers = with maintainers; [ rnhmjoj ];
  };
}

View file

@ -0,0 +1,64 @@
{ lib, stdenv, fetchurl, pkg-config, nixosTests
, boost, libyamlcpp, libsodium, sqlite, protobuf, openssl, systemd
, mariadb-connector-c, postgresql, lua, openldap, geoip, curl, unixODBC, lmdb, tinycdb
}:

stdenv.mkDerivation rec {
  pname = "powerdns";
  version = "4.6.2";

  src = fetchurl {
    url = "https://downloads.powerdns.com/releases/pdns-${version}.tar.bz2";
    hash = "sha256-9EOEiUS7Ebu0hQIhYTs6Af+1f+vyZx2myqVzYu4LGbg=";
  };

  # redact configure flags from version output to reduce closure size
  patches = [ ./version.patch ];

  nativeBuildInputs = [ pkg-config ];
  buildInputs = [
    boost mariadb-connector-c postgresql lua openldap sqlite protobuf geoip
    libyamlcpp libsodium curl unixODBC openssl systemd lmdb tinycdb
  ];

  # Configure phase requires 64-bit time_t even on 32-bit platforms.
  NIX_CFLAGS_COMPILE = lib.optionals stdenv.hostPlatform.is32bit [
    "-D_TIME_BITS=64" "-D_FILE_OFFSET_BITS=64"
  ];

  configureFlags = [
    "--disable-silent-rules"
    "--enable-dns-over-tls"
    "--enable-unit-tests"
    "--enable-reproducible"
    "--enable-tools"
    "--enable-ixfrdist"
    "--enable-systemd"
    "--with-libsodium"
    "--with-sqlite3"
    "--with-libcrypto=${openssl.dev}"
  ];

  # Nix mangles the --with-modules/--with-dynmodules arguments when they
  # are passed through configureFlags (the embedded spaces get split), so
  # add them to configureFlagsArray in preConfigure instead.
  preConfigure = ''
    configureFlagsArray+=(
      "--with-modules="
      "--with-dynmodules=bind geoip gmysql godbc gpgsql gsqlite3 ldap lmdb lua2 pipe remote tinydns"
    )
  '';

  enableParallelBuilding = true;
  doCheck = true;

  passthru.tests = {
    nixos = nixosTests.powerdns;
  };

  meta = with lib; {
    description = "Authoritative DNS server";
    homepage = "https://www.powerdns.com";
    platforms = platforms.unix;
    broken = stdenv.isDarwin;
    license = licenses.gpl2;
    maintainers = with maintainers; [ mic92 disassembler nickcao ];
  };
}

View file

@ -0,0 +1,13 @@
diff --git a/pdns/version.cc b/pdns/version.cc
index d8f5d40..1368481 100644
--- a/pdns/version.cc
+++ b/pdns/version.cc
@@ -155,7 +155,7 @@ void showBuildConfiguration()
#ifdef PDNS_CONFIG_ARGS
#define double_escape(s) #s
#define escape_quotes(s) double_escape(s)
- g_log<<Logger::Warning<<"Configured with: "<<escape_quotes(PDNS_CONFIG_ARGS)<<endl;
+ g_log<<Logger::Warning<<"Configured with: "<<"redacted"<<endl;
#undef escape_quotes
#undef double_escape
#endif

View file

@ -0,0 +1,40 @@
{ lib, buildGoModule, fetchFromGitHub, go-bindata, go-bindata-assetfs, nixosTests }:

buildGoModule rec {
  pname = "documize-community";
  version = "3.9.0";

  src = fetchFromGitHub {
    owner = "documize";
    repo = "community";
    rev = "v${version}";
    sha256 = "sha256-Kv4BsFB08rkGRkePFIkjjuhK1TnLPS4m+PUlgKG5cTQ=";
  };

  # Upstream vendors its dependencies in-tree, so no separate modules
  # fetch is needed.
  vendorSha256 = null;

  doCheck = false;

  nativeBuildInputs = [ go-bindata go-bindata-assetfs ];

  # This is really weird, but they've managed to screw up
  # their folder structure enough, you can only build by
  # literally cding into this folder.
  preBuild = "cd edition";

  subPackages = [ "." ];

  passthru.tests = { inherit (nixosTests) documize; };

  # The binary is named after the build directory ("edition"); rename it
  # to something users will actually look for.
  postInstall = ''
    mv $out/bin/edition $out/bin/documize
  '';

  meta = with lib; {
    description = "Open source Confluence alternative for internal & external docs built with Golang + EmberJS";
    license = licenses.agpl3;
    maintainers = with maintainers; [ ma27 elseym ];
    mainProgram = "documize";
    homepage = "https://www.documize.com/";
  };
}

View file

@ -0,0 +1,90 @@
{ lib, stdenv,
  fetchFromGitHub,
  makeWrapper,
  cmake,
  python3,
  openssl,
  pkg-config,
  mosquitto,
  lua5_3,
  sqlite,
  jsoncpp,
  zlib,
  boost,
  curl,
  git,
  libusb-compat-0_1,
  cereal
}:

stdenv.mkDerivation rec {
  pname = "domoticz";
  version = "2022.1";

  src = fetchFromGitHub {
    owner = "domoticz";
    repo = pname;
    rev = version;
    sha256 = "sha256-wPSmpk3YeA+dNjx2mBdRkP2Mx/1cfrQOMLV5H5Ti7qU=";
    fetchSubmodules = true;
  };

  buildInputs = [
    openssl
    python3
    mosquitto
    lua5_3
    sqlite
    jsoncpp
    boost
    zlib
    curl
    git
    libusb-compat-0_1
    cereal
  ];

  nativeBuildInputs = [
    cmake
    pkg-config
    makeWrapper
  ];

  # Prefer the system libraries over the bundled copies wherever upstream
  # supports it; minizip stays built-in.
  cmakeFlags = [
    "-DCMAKE_BUILD_TYPE=Release"
    "-DUSE_BUILTIN_MQTT=false"
    "-DUSE_BUILTIN_LUA=false"
    "-DUSE_BUILTIN_SQLITE=false"
    "-DUSE_BUILTIN_JSONCPP=false"
    "-DUSE_BUILTIN_ZLIB=false"
    "-DUSE_OPENSSL_STATIC=false"
    "-DUSE_STATIC_BOOST=false"
    "-DUSE_BUILTIN_MINIZIP=true"
  ];

  # Manual install: the runtime web assets, configuration and scripts are
  # copied from $src next to the binary, which looks them up relative to
  # its own location at run time.  LD_LIBRARY_PATH is set so the embedded
  # Python plugin support can find libpython.
  installPhase = ''
    mkdir -p $out/share/domoticz
    cp -r $src/www $out/share/domoticz/
    cp -r $src/Config $out/share/domoticz
    cp -r $src/scripts $out/share/domoticz
    cp -r $src/plugins $out/share/domoticz
    mkdir -p $out/bin
    cp domoticz $out/bin
    wrapProgram $out/bin/domoticz --set LD_LIBRARY_PATH ${python3}/lib;
  '';

  meta = with lib; {
    description = "Home automation system";
    longDescription = ''
      Domoticz is a home automation system that lets you monitor and configure
      various devices like: lights, switches, various sensors/meters like
      temperature, rain, wind, UV, electra, gas, water and much more
    '';
    maintainers = with maintainers; [ edcragg ];
    homepage = "https://www.domoticz.com/";
    license = licenses.gpl3Plus;
    platforms = platforms.all;
    broken = stdenv.isDarwin; # never built on Hydra https://hydra.nixos.org/job/nixpkgs/staging-next/domoticz.x86_64-darwin
  };
}

View file

@ -0,0 +1,21 @@
{ lib, buildGoModule, fetchFromGitHub }:

buildGoModule {
  pname = "duckling-proxy";
  # Upstream has no tagged releases, so a commit is pinned; use the
  # nixpkgs convention "unstable-<date>" (cf. the echoip package) rather
  # than "<date>-unstable".
  version = "unstable-2021-07-23";

  src = fetchFromGitHub {
    owner = "LukeEmmet";
    repo = "duckling-proxy";
    rev = "e2bfd73a60d7afa43f13a9d420d514131fee8fd1";
    sha256 = "134hnfa4f5sb1z1j5684wmqzascsrlagx8z36i1470yggb00j4hr";
  };

  vendorSha256 = "0wxk1a5gn9a7q2kgq11a783rl5cziipzhndgp71i365y3p1ssqyf";

  meta = with lib; {
    description = "Gemini proxy to access the Small Web";
    homepage = "https://github.com/LukeEmmet/duckling-proxy";
    license = licenses.mit;
    maintainers = with maintainers; [ kaction ];
  };
}

View file

@ -0,0 +1,28 @@
{ lib, buildGoModule, fetchFromGitHub }:

buildGoModule {
  pname = "echoip";
  version = "unstable-2019-07-12";

  src = fetchFromGitHub {
    owner = "mpolden";
    repo = "echoip";
    rev = "fb5fac92d2173c2a5b07ed4ecc7b5fefe8484ed2";
    sha256 = "17gkh1qfxasvxy25lmjdwk5fsjkcp7lmw9si3xzf01m7qnj5zi4b";
  };

  vendorSha256 = "0vvs717pl5gzggxpbn2vkyxmpiw5zjdfnpbh8i81xidbqvlnm22h";

  # The bundled index.html goes into its own output so consumers can
  # reference just the web page.
  outputs = [ "out" "index" ];

  postInstall = ''
    mkdir -p $index
    cp $src/index.html $index/index.html
  '';

  meta = {
    homepage = "https://github.com/mpolden/echoip";
    license = lib.licenses.bsd3;
    maintainers = [ lib.maintainers.rvolosatovs ];
  };
}

View file

@ -0,0 +1,41 @@
{ lib, stdenv, fetchurl, jdk, jre, makeWrapper, runCommand, python3Packages, writeText }:

# NOTE: jdk is retained in the argument list for interface compatibility
# with existing callers; it is no longer used now that nothing is
# extracted from the jar.
let
  elasticmq-server = stdenv.mkDerivation rec {
    pname = "elasticmq-server";
    version = "1.2.0";

    # Upstream distributes a single self-contained jar.
    src = fetchurl {
      url = "https://s3-eu-west-1.amazonaws.com/softwaremill-public/${pname}-${version}.jar";
      sha256 = "06bn5ixz0pvvhfvavr6njv8c2i9pgd6gj32wnp2f0fn0z1kypn1f";
    };

    # The jar is installed as-is; the previous custom unpackPhase only
    # extracted an unused favicon.png from it.
    dontUnpack = true;

    nativeBuildInputs = [ makeWrapper ];

    installPhase = ''
      runHook preInstall
      mkdir -p $out/bin $out/share/elasticmq-server
      cp $src $out/share/elasticmq-server/elasticmq-server.jar
      # TODO: how to add extraArgs? current workaround is to use JAVA_TOOL_OPTIONS environment to specify properties
      makeWrapper ${jre}/bin/java $out/bin/elasticmq-server \
        --add-flags "-jar $out/share/elasticmq-server/elasticmq-server.jar"
      runHook postInstall
    '';

    meta = with lib; {
      homepage = "https://github.com/softwaremill/elasticmq";
      description = "Message queueing system with Java, Scala and Amazon SQS-compatible interfaces";
      sourceProvenance = with sourceTypes; [ binaryBytecode ];
      license = licenses.asl20;
      platforms = platforms.unix;
      maintainers = with maintainers; [ peterromfeldhk ];
    };
  };
in
# Attach the integration test to the finished derivation via passthru.
elasticmq-server.overrideAttrs (_: {
  passthru.tests.elasticmqTest = import ./elasticmq-test.nix {
    inherit elasticmq-server runCommand python3Packages writeText;
  };
})

View file

@ -0,0 +1,47 @@
{ elasticmq-server, python3Packages, runCommand, writeText}:

# Smoke test for elasticmq-server (referenced from its passthru.tests):
# start the server, enqueue one message with boto3, then read it back.
runCommand "${elasticmq-server.name}-tests" (let
  # boto3 setup shared by both scripts: connect to the local server on
  # port 9324 with dummy credentials and grab the pre-declared queue.
  commonPy = ''
    import boto3
    client = boto3.resource(
        "sqs",
        endpoint_url="http://localhost:9324",
        region_name="elasticmq",
        aws_secret_access_key="x",
        aws_access_key_id="x",
        use_ssl=False,
    )
    queue = client.get_queue_by_name(QueueName="foobar")
  '';
in {
  buildInputs = with python3Packages; [ python boto3 ];

  # Server configuration declaring the queue used by the scripts below.
  emqConfig = writeText "emq-test.conf" ''
    generate-node-address = true
    queues {
        foobar {}
    }
  '';

  # Producer: send a single message.
  putMessagePy = writeText "put_message.py" ''
    ${commonPy}
    queue.send_message(MessageBody="bazqux")
  '';

  # Consumer: receive it and assert the body round-tripped intact.
  checkMessagePy = writeText "check_message.py" ''
    ${commonPy}
    messages = queue.receive_messages()
    print(f"Received {messages!r}")
    assert len(messages) == 1
    assert messages[0].body == "bazqux"
  '';
# Builder script: launch the server with the config file (passed through
# JAVA_TOOL_OPTIONS since the wrapper takes no extra args), give it time
# to come up, then run both scripts.
}) ''
  JAVA_TOOL_OPTIONS="-Dconfig.file=$emqConfig" ${elasticmq-server}/bin/elasticmq-server &
  SERVER_PID=$!
  sleep 10
  python $putMessagePy
  python $checkMessagePy
  touch $out
  # needed on darwin
  kill $SERVER_PID
''

View file

@ -0,0 +1,27 @@
{ lib
, buildGoModule
, fetchFromGitHub
}:

let
  # Upstream tags releases as date-based versions.
  version = "20220308.1";
in
buildGoModule {
  pname = "endlessh-go";
  inherit version;

  src = fetchFromGitHub {
    owner = "shizunge";
    repo = "endlessh-go";
    rev = version;
    hash = "sha256-U+h/WmTVwwUIBEOiNa/EKS6HvkeoGNmP3NpeP1fcqYw=";
  };

  vendorSha256 = "sha256-h/DpbXO+LUsB9NOAXUfNx3VOfEsiolfBEMBrAqVlU3A=";

  # Route dependency downloads through the Go module proxy when creating
  # the fixed-output vendor directory.
  proxyVendor = true;

  meta = {
    description = "An implementation of endlessh exporting Prometheus metrics";
    homepage = "https://github.com/shizunge/endlessh-go";
    license = lib.licenses.gpl3Plus;
    maintainers = [ lib.maintainers.azahi ];
  };
}

View file

@ -0,0 +1,24 @@
{ lib, stdenv, fetchFromGitHub }:

let
  version = "1.1";
in
stdenv.mkDerivation {
  pname = "endlessh";
  inherit version;

  src = fetchFromGitHub {
    owner = "skeeto";
    repo = "endlessh";
    rev = version;
    sha256 = "0ziwr8j1frsp3dajr8h5glkm1dn5cci404kazz5w1jfrp0736x68";
  };

  # Hand the Nix output path to make as PREFIX.
  makeFlags = [ "PREFIX=$(out)" ];

  meta = {
    description = "SSH tarpit that slowly sends an endless banner";
    homepage = "https://github.com/skeeto/endlessh";
    changelog = "https://github.com/skeeto/endlessh/releases/tag/${version}";
    license = lib.licenses.unlicense;
    maintainers = [ lib.maintainers.marsam ];
    platforms = lib.platforms.unix;
  };
}

35
pkgs/servers/etcd/3.3.nix Normal file
View file

@ -0,0 +1,35 @@
{ lib, buildGoPackage, fetchFromGitHub, nixosTests }:

# etcd 3.3.x, built with the legacy GOPATH-based Go builder; the sibling
# 3.4/3.5 files use buildGoModule instead.
buildGoPackage rec {
  pname = "etcd";
  version = "3.3.27";

  # Import path under $GOPATH/src where the checkout is placed.
  goPackagePath = "github.com/coreos/etcd";

  src = fetchFromGitHub {
    owner = "etcd-io";
    repo = "etcd";
    rev = "v${version}";
    sha256 = "sha256-zO+gwzaTgeFHhlkY/3AvRTEA4Yltlp+NqdlDe4dLJYg=";
  };

  # Drive the build through upstream's own scripts (./build and
  # ./functional/build) from the GOPATH checkout; patchShebangs rewrites
  # their shebang lines to store paths first.
  buildPhase = ''
    cd go/src/${goPackagePath}
    patchShebangs .
    ./build
    ./functional/build
  '';

  # Install both the main binaries and the functional-test command binaries.
  installPhase = ''
    install -Dm755 bin/* bin/functional/cmd/* -t $out/bin
  '';

  # Expose the NixOS VM tests that exercise this package.
  passthru.tests = { inherit (nixosTests) etcd etcd-cluster; };

  meta = with lib; {
    description = "Distributed reliable key-value store for the most critical data of a distributed system";
    license = licenses.asl20;
    homepage = "https://etcd.io/";
    maintainers = with maintainers; [ offline zowoq ];
  };
}

34
pkgs/servers/etcd/3.4.nix Normal file
View file

@ -0,0 +1,34 @@
{ lib, buildGoModule, fetchFromGitHub }:

buildGoModule rec {
  pname = "etcd";
  version = "3.4.18";

  # null = build against the vendor/ directory committed in the source
  # tree instead of fetching dependencies into a fixed-output derivation.
  vendorSha256 = null;

  # NOTE(review): no reason recorded here — presumably the test suite is
  # too slow or needs networking in the sandbox; confirm before enabling.
  doCheck = false;

  src = fetchFromGitHub {
    owner = "etcd-io";
    repo = "etcd";
    rev = "v${version}";
    sha256 = "sha256-/bXcW5g8mNFEjvfg+1loLFi8+IaWdcTE/lUPsHzEaIo=";
  };

  # Drive the build through upstream's own scripts (./build and
  # ./functional/build); patchShebangs rewrites their shebang lines first.
  buildPhase = ''
    patchShebangs .
    ./build
    ./functional/build
  '';

  # Install both the main binaries and the functional-test command binaries.
  installPhase = ''
    install -Dm755 bin/* bin/functional/cmd/* -t $out/bin
  '';

  meta = with lib; {
    description = "Distributed reliable key-value store for the most critical data of a distributed system";
    license = licenses.asl20;
    homepage = "https://etcd.io/";
    maintainers = with maintainers; [ offline zowoq ];
  };
}

82
pkgs/servers/etcd/3.5.nix Normal file
View file

@ -0,0 +1,82 @@
{ lib, buildGoModule, fetchFromGitHub, symlinkJoin }:

# etcd 3.5 splits the upstream repository into separate Go modules
# (server, etcdutl, etcdctl).  Each is built below from the same source
# checkout with its own modRoot and vendor hash, and the results are
# merged into one store path with symlinkJoin.
let
  etcdVersion = "3.5.4";

  # Single shared checkout for all three sub-builds.
  etcdSrc = fetchFromGitHub {
    owner = "etcd-io";
    repo = "etcd";
    rev = "v${etcdVersion}";
    sha256 = "sha256-mTQHxLLfNiihvHg5zaTeVNWKuzvE0KBiJdY3qMJHMCM=";
  };

  # meta shared by every sub-derivation and by the joined result.
  commonMeta = with lib; {
    description = "Distributed reliable key-value store for the most critical data of a distributed system";
    license = licenses.asl20;
    homepage = "https://etcd.io/";
    maintainers = with maintainers; [ offline zowoq endocrimes ];
    platforms = platforms.darwin ++ platforms.linux;
  };

  etcdserver = buildGoModule rec {
    pname = "etcdserver";
    version = etcdVersion;
    vendorSha256 = "sha256-4djUQvWp9hScua9l1ZTq298zWSeDYRDojEt2AWmarzw=";
    src = etcdSrc;
    # Build only the server module of the multi-module repo.
    modRoot = "./server";
    # The module's binary comes out named "server"; rename it to the
    # conventional "etcd".
    postBuild = ''
      mv $GOPATH/bin/{server,etcd}
    '';
    CGO_ENABLED = 0;
    # We set the GitSHA to `GitNotFound` to match official build scripts when
    # git is unavailable. This is to avoid doing a full Git Checkout of etcd.
    # User facing version numbers are still available in the binary, just not
    # the sha it was built from.
    ldflags = [ "-X go.etcd.io/etcd/api/v3/version.GitSHA=GitNotFound" ];
    meta = commonMeta;
  };

  etcdutl = buildGoModule rec {
    pname = "etcdutl";
    version = etcdVersion;
    vendorSha256 = "sha256-nk56XGpNsDwcGrTKithKGnPCX0NhpQmzNSXHk3vmdtg=";
    src = etcdSrc;
    modRoot = "./etcdutl";
    CGO_ENABLED = 0;
    meta = commonMeta;
  };

  etcdctl = buildGoModule rec {
    pname = "etcdctl";
    version = etcdVersion;
    vendorSha256 = "sha256-WIMYrXfay6DMz+S/tIc/X4ffMizxub8GS1DDgIR40D4=";
    src = etcdSrc;
    modRoot = "./etcdctl";
    CGO_ENABLED = 0;
    meta = commonMeta;
  };
in
# Merge the three tools into a single "etcd" store path.
symlinkJoin {
  name = "etcd";
  version = etcdVersion;
  meta = commonMeta;
  paths = [
    etcdserver
    etcdutl
    etcdctl
  ];
}

View file

@ -0,0 +1,42 @@
{ lib
, fetchFromGitHub
, buildPythonPackage
, makeWrapper
, aioredis
, aiofiles
, django_3
, fastapi
, msgpack
, pynacl
, typing-extensions
}:

buildPythonPackage rec {
  pname = "etebase-server";
  version = "0.8.3";
  # Not a setuptools project; install is done by hand below.
  format = "other";

  src = fetchFromGitHub {
    owner = "etesync";
    repo = "server";
    rev = "v${version}";
    sha256 = "sha256-rPs34uzb5veiOw74SACLrDm4Io0CYH9EL9IuV38CkPY=";
  };

  # Drop the hard-coded secret.txt location (see the patch).
  patches = [ ./secret.patch ];

  # wrapProgram (used in installPhase) is provided by makeWrapper; it was
  # previously missing from the inputs.
  nativeBuildInputs = [ makeWrapper ];

  propagatedBuildInputs = [
    aioredis
    aiofiles
    django_3
    fastapi
    msgpack
    pynacl
    typing-extensions
  ];

  # Install the whole source tree and expose manage.py as the entry point,
  # wrapped so it sees the propagated Python dependencies.
  installPhase = ''
    runHook preInstall
    mkdir -p $out/bin $out/lib
    cp -r . $out/lib/etebase-server
    ln -s $out/lib/etebase-server/manage.py $out/bin/etebase-server
    wrapProgram $out/bin/etebase-server --prefix PYTHONPATH : "$PYTHONPATH"
    chmod +x $out/bin/etebase-server
    runHook postInstall
  '';

  meta = with lib; {
    homepage = "https://github.com/etesync/server";
    description = "An Etebase (EteSync 2.0) server so you can run your own.";
    license = licenses.agpl3Only;
    maintainers = with maintainers; [ felschr ];
  };
}

View file

@ -0,0 +1,26 @@
diff --git a/etebase_server/settings.py b/etebase_server/settings.py
index 9baf8d3..501d9f6 100644
--- a/etebase_server/settings.py
+++ b/etebase_server/settings.py
@@ -23,11 +22,6 @@
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.0/howto/deployment/checklist/
-# SECURITY WARNING: keep the secret key used in production secret!
-# See secret.py for how this is generated; uses a file 'secret.txt' in the root
-# directory
-SECRET_FILE = os.path.join(BASE_DIR, "secret.txt")
-
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
@@ -143,7 +137,7 @@
section = config["global"]
- SECRET_FILE = section.get("secret_file", SECRET_FILE)
+ SECRET_FILE = section.get("secret_file", None)
STATIC_ROOT = section.get("static_root", STATIC_ROOT)
STATIC_URL = section.get("static_url", STATIC_URL)
MEDIA_ROOT = section.get("media_root", MEDIA_ROOT)

Some files were not shown because too many files have changed in this diff Show more