uboot: (firmwareOdroidC2/C4) don't invoke patch tool, use patches = [] instead
https://github.com/NixOS/nixpkgs/blob/master/pkgs/stdenv/generic/setup.sh#L948 this can do it nicely. Signed-off-by: Anton Arapov <anton@deadbeef.mx>
This commit is contained in:
commit
56de2bcd43
30691 changed files with 3076956 additions and 0 deletions
37
pkgs/build-support/setup-hooks/audit-blas.sh
Normal file
37
pkgs/build-support/setup-hooks/audit-blas.sh
Normal file
|
|
@ -0,0 +1,37 @@
|
|||
# Ensure that we are always linking against "libblas.so.3" and
# "liblapack.so.3".

auditBlas() {
    local dir="$prefix"
    [ -e "$dir" ] || return 0

    local i
    while IFS= read -r -d $'\0' i; do
        if ! isELF "$i"; then continue; fi

        # Fail when a binary links a concrete BLAS/LAPACK implementation
        # directly instead of the switchable wrapper libraries.
        if $OBJDUMP -p "$i" | grep 'NEEDED' | awk '{ print $2; }' | grep -q '\(libmkl_rt.so\|libopenblas.so.0\)'; then
            echo "$i refers to a specific implementation of BLAS or LAPACK."
            echo "This prevents users from switching BLAS/LAPACK implementations."
            echo "Add \`blas' or \`lapack' to buildInputs instead of \`mkl' or \`openblas'."
            exit 1
        fi

        # Walk each RPATH entry.  The command substitution must stay
        # UNQUOTED so that IFS=: actually splits the patchelf output into
        # one word per entry; the original quoted it, so the whole RPATH
        # was treated as a single (nonexistent) directory and the checks
        # below never fired for multi-entry RPATHs.
        (IFS=:
        for rpathDir in $(patchelf --print-rpath "$i"); do
            if [ -f "$rpathDir/libblas.so.3" ] || [ -f "$rpathDir/libblas.so" ]; then
                if [ "$rpathDir" != "@blas@/lib" ]; then
                    echo "$rpathDir is not allowed to contain a library named libblas.so.3"
                    exit 1
                fi
            fi
            if [ -f "$rpathDir/liblapack.so.3" ] || [ -f "$rpathDir/liblapack.so" ]; then
                if [ "$rpathDir" != "@lapack@/lib" ]; then
                    echo "$rpathDir is not allowed to contain a library named liblapack.so.3"
                    exit 1
                fi
            fi
        done) || exit 1
        # ^ `exit` inside ( … ) only leaves the subshell; propagate the
        # failure so the audit actually aborts the build.
    done < <(find "$dir" -type f -print0)
}

fixupOutputHooks+=(auditBlas)
41
pkgs/build-support/setup-hooks/audit-tmpdir.sh
Normal file
41
pkgs/build-support/setup-hooks/audit-tmpdir.sh
Normal file
|
|
@ -0,0 +1,41 @@
|
|||
# Check whether RPATHs or wrapper scripts contain references to
# $TMPDIR. This is a serious security bug because it allows any user
# to inject files into search paths of other users' processes.
#
# It might be better to have Nix scan build output for any occurrence
# of $TMPDIR (which would also be good for reproducibility), but at
# the moment that would produce too many spurious errors (e.g. debug
# info or assertion messages that refer to $TMPDIR).

fixupOutputHooks+=('if [[ -z "${noAuditTmpdir-}" && -e "$prefix" ]]; then auditTmpdir "$prefix"; fi')

auditTmpdir() {
    local dir="$1"
    [ -e "$dir" ] || return 0

    header "checking for references to $TMPDIR/ in $dir..."

    local file
    while IFS= read -r -d $'\0' file; do
        # Build-id paths legitimately contain hash-like junk; skip them.
        if [[ "$file" =~ .build-id ]]; then continue; fi

        # ELF binaries: reject any RPATH entry rooted in $TMPDIR.
        if isELF "$file"; then
            if { printf :; patchelf --print-rpath "$file"; } | grep -q -F ":$TMPDIR/"; then
                echo "RPATH of binary $file contains a forbidden reference to $TMPDIR/"
                exit 1
            fi
        fi

        # Wrapper scripts: only audit scripts that wrap a hidden
        # ".<name>-wrapped" sibling.
        if isScript "$file"; then
            if [ -e "$(dirname "$file")/.$(basename "$file")-wrapped" ]; then
                if grep -q -F "$TMPDIR/" "$file"; then
                    echo "wrapper script $file contains a forbidden reference to $TMPDIR/"
                    exit 1
                fi
            fi
        fi

    done < <(find "$dir" -type f -print0)

    stopNest
}
335
pkgs/build-support/setup-hooks/auto-patchelf.py
Normal file
335
pkgs/build-support/setup-hooks/auto-patchelf.py
Normal file
|
|
@ -0,0 +1,335 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
import argparse
|
||||
import os
|
||||
import pprint
|
||||
import subprocess
|
||||
import sys
|
||||
from collections import defaultdict
|
||||
from contextlib import contextmanager
|
||||
from dataclasses import dataclass
|
||||
from itertools import chain
|
||||
from pathlib import Path, PurePath
|
||||
from typing import DefaultDict, Iterator, List, Optional, Set, Tuple
|
||||
|
||||
from elftools.common.exceptions import ELFError # type: ignore
|
||||
from elftools.elf.dynamic import DynamicSection # type: ignore
|
||||
from elftools.elf.elffile import ELFFile # type: ignore
|
||||
from elftools.elf.enums import ENUM_E_TYPE, ENUM_EI_OSABI # type: ignore
|
||||
|
||||
|
||||
@contextmanager
def open_elf(path: Path) -> Iterator[ELFFile]:
    """Context manager yielding an ELFFile for the file at *path*.

    The underlying file handle is closed when the context exits.
    """
    with path.open('rb') as handle:
        yield ELFFile(handle)
|
||||
def is_static_executable(elf: ELFFile) -> bool:
    """Whether *elf* is a statically linked executable.

    Statically linked executables have an ELF type of EXEC but no INTERP
    section.
    """
    if elf.header["e_type"] != 'ET_EXEC':
        return False
    return not elf.get_section_by_name(".interp")
|
||||
def is_dynamic_executable(elf: ELFFile) -> bool:
    """Whether *elf* needs a dynamic loader.

    We do not require an ELF type of EXEC: position-independent
    executables typically have an INTERP section while their ELF type
    is DYN, and they are covered too.
    """
    interp_section = elf.get_section_by_name(".interp")
    return bool(interp_section)
||||
|
||||
def get_dependencies(elf: ELFFile) -> List[str]:
    """Sonames from the DT_NEEDED tags of *elf*'s dynamic section.

    Iterates all sections on purpose: for some reason
    elf.get_section_by_name(".dynamic") does not always return an
    instance of DynamicSection, but that is required to call iter_tags.
    """
    for section in elf.iter_sections():
        if isinstance(section, DynamicSection):
            # There is only one dynamic section.
            return [tag.needed for tag in section.iter_tags('DT_NEEDED')]
    return []
|
||||
def get_rpath(elf: ELFFile) -> List[str]:
    """Runtime search path of *elf*, split on ':'.

    DT_RUNPATH takes precedence over DT_RPATH.  Iterates all sections on
    purpose: elf.get_section_by_name(".dynamic") does not always return
    a DynamicSection instance, which iter_tags requires.
    """
    for section in elf.iter_sections():
        if not isinstance(section, DynamicSection):
            continue
        for tag in section.iter_tags('DT_RUNPATH'):
            return tag.runpath.split(':')
        for tag in section.iter_tags('DT_RPATH'):
            return tag.rpath.split(':')
        break  # There is only one dynamic section
    return []
|
||||
def get_arch(elf: ELFFile) -> str:
    """Machine architecture string as reported by pyelftools."""
    return elf.get_machine_arch()


def get_osabi(elf: ELFFile) -> str:
    """OS ABI identifier from the ELF identification header."""
    return elf.header["e_ident"]["EI_OSABI"]
|
||||
def osabi_are_compatible(wanted: str, got: str) -> bool:
    """
    Tests whether two OS ABIs are compatible, taking into account the
    generally accepted compatibility of SVR4 ABI with other ABIs.
    """
    # If either ABI could not be detected, fall back to assuming
    # compatibility.
    if not (wanted and got):
        return True

    # The base ABI (0x00), shown by readelf(1) as "UNIX - System V",
    # indicates broad compatibility with other ABIs — in either
    # direction.
    #
    # TODO: This isn't always true. For example, some OSes embed ABI
    # compatibility into SHT_NOTE sections like .note.tag and
    # .note.ABI-tag. It would be prudent to add these to the detection
    # logic to produce better ABI information.
    if 'ELFOSABI_SYSV' in (wanted, got):
        return True

    # Otherwise require an exact match.
    return wanted == got
|
||||
def glob(path: Path, pattern: str, recursive: bool) -> Iterator[Path]:
    """Glob *pattern* under *path*; recurse into subdirectories if asked."""
    matcher = path.rglob if recursive else path.glob
    return matcher(pattern)


# Directories already indexed by populate_cache.
cached_paths: Set[Path] = set()
# (soname, arch) -> [(directory containing the lib, its OS ABI), ...]
soname_cache: DefaultDict[Tuple[str, str], List[Tuple[Path, str]]] = defaultdict(list)
|
||||
def populate_cache(initial: List[Path], recursive: bool = False) -> None:
    """Index every shared object found under *initial* into soname_cache.

    Directories named in an object's RPATH/RUNPATH are queued for
    indexing as well (except $ORIGIN-relative entries), so transitively
    referenced library directories are covered too.  Already-visited
    directories (cached_paths) are skipped.
    """
    pending = list(initial)

    while pending:
        lib_dir = pending.pop(0)

        if lib_dir in cached_paths:
            continue
        cached_paths.add(lib_dir)

        for candidate in glob(lib_dir, "*.so*", recursive):
            if not candidate.is_file():
                continue

            resolved = candidate.resolve()
            try:
                with open_elf(candidate) as elf:
                    osabi = get_osabi(elf)
                    arch = get_arch(elf)
                    # Queue RPATH entries, minus $ORIGIN-relative ones.
                    pending += [Path(p) for p in get_rpath(elf)
                                if p and '$ORIGIN' not in p]
                    soname_cache[(candidate.name, arch)].append((resolved.parent, osabi))
            except ELFError:
                # Not an ELF file in the right format
                pass
|
||||
def find_dependency(soname: str, soarch: str, soabi: str) -> Optional[Path]:
    """Resolve *soname* via soname_cache, honouring arch and OS ABI.

    Returns the directory containing a compatible library, or None.
    """
    for lib_dir, lib_abi in soname_cache[(soname, soarch)]:
        if osabi_are_compatible(soabi, lib_abi):
            return lib_dir
    return None
|
||||
@dataclass
class Dependency:
    """One DT_NEEDED entry of an ELF file and its resolution status."""
    file: Path           # The file that contains the dependency
    name: Path           # The name of the dependency
    found: bool = False  # Whether it was found somewhere
||||
def auto_patchelf_file(path: Path, runtime_deps: list[Path]) -> list[Dependency]:
    """Patch one ELF file: set its interpreter and rebuild its RPATH.

    Skips static executables, segment-less objects, and files whose
    architecture or OS ABI does not match the target interpreter
    (module globals interpreter_arch / interpreter_osabi / libc_lib —
    populated in the __main__ block).  Returns one Dependency record per
    DT_NEEDED entry that had to be searched for, flagged found/not found.
    """
    try:
        with open_elf(path) as elf:

            if is_static_executable(elf):
                # No point patching these
                print(f"skipping {path} because it is statically linked")
                return []

            if elf.num_segments() == 0:
                # no segment (e.g. object file)
                print(f"skipping {path} because it contains no segment")
                return []

            file_arch = get_arch(elf)
            if interpreter_arch != file_arch:
                # Our target architecture is different than this file's
                # architecture, so skip it.
                print(f"skipping {path} because its architecture ({file_arch})"
                      f" differs from target ({interpreter_arch})")
                return []

            file_osabi = get_osabi(elf)
            if not osabi_are_compatible(interpreter_osabi, file_osabi):
                print(f"skipping {path} because its OS ABI ({file_osabi}) is"
                      f" not compatible with target ({interpreter_osabi})")
                return []

            file_is_dynamic_executable = is_dynamic_executable(elf)

            file_dependencies = map(Path, get_dependencies(elf))

    except ELFError:
        # Not an ELF file at all: nothing to patch.
        return []

    rpath = []
    if file_is_dynamic_executable:
        print("setting interpreter of", path)
        subprocess.run(
                ["patchelf", "--set-interpreter", interpreter_path.as_posix(), path.as_posix()],
                check=True)
        rpath += runtime_deps

    print("searching for dependencies of", path)
    dependencies = []
    # Be sure to get the output of all missing dependencies instead of
    # failing at the first one, because it's more useful when working
    # on a new package where you don't yet know the dependencies.
    for dep in file_dependencies:
        if dep.is_absolute() and dep.is_file():
            # This is an absolute path. If it exists, just use it.
            # Otherwise, we probably want this to produce an error when
            # checked (because just updating the rpath won't satisfy
            # it).
            continue
        elif (libc_lib / dep).is_file():
            # This library exists in libc, and will be correctly
            # resolved by the linker.
            continue

        if found_dependency := find_dependency(dep.name, file_arch, file_osabi):
            rpath.append(found_dependency)
            dependencies.append(Dependency(path, dep, True))
            print(f"    {dep} -> found: {found_dependency}")
        else:
            dependencies.append(Dependency(path, dep, False))
            print(f"    {dep} -> not found!")

    # Dedup the rpath, preserving first-seen order (dict.fromkeys).
    rpath_str = ":".join(dict.fromkeys(map(Path.as_posix, rpath)))

    if rpath:
        print("setting RPATH to:", rpath_str)
        subprocess.run(
                ["patchelf", "--set-rpath", rpath_str, path.as_posix()],
                check=True)

    return dependencies
||||
|
||||
def auto_patchelf(
        paths_to_patch: List[Path],
        lib_dirs: List[Path],
        runtime_deps: List[Path],
        recursive: bool = True,
        ignore_missing: Optional[List[str]] = None) -> None:
    """Patch every ELF file under *paths_to_patch*.

    Libraries are searched for in the paths being patched first, then in
    *lib_dirs*.  Exits the process when nothing was given to patch, or
    when a non-ignored dependency could not be satisfied.  A literal "*"
    in *ignore_missing* ignores every missing dependency.
    """
    # argparse hands us None when --ignore-missing is absent, which used
    # to crash the `in` checks below; the old mutable default ([]) was
    # also a shared-state hazard.  Normalize once here.
    if ignore_missing is None:
        ignore_missing = []

    if not paths_to_patch:
        sys.exit("No paths to patch, stopping.")

    # Add all shared objects of the current output path to the cache,
    # before lib_dirs, so that they are chosen first in find_dependency.
    populate_cache(paths_to_patch, recursive)
    populate_cache(lib_dirs)

    dependencies = []
    for path in chain.from_iterable(glob(p, '*', recursive) for p in paths_to_patch):
        if not path.is_symlink() and path.is_file():
            dependencies += auto_patchelf_file(path, runtime_deps)

    missing = [dep for dep in dependencies if not dep.found]

    # Print a summary of the missing dependencies at the end
    print(f"auto-patchelf: {len(missing)} dependencies could not be satisfied")
    failure = False
    for dep in missing:
        if dep.name.name in ignore_missing or "*" in ignore_missing:
            print(f"warn: auto-patchelf ignoring missing {dep.name} wanted by {dep.file}")
        else:
            print(f"error: auto-patchelf could not satisfy dependency {dep.name} wanted by {dep.file}")
            failure = True

    if failure:
        sys.exit('auto-patchelf failed to find all the required dependencies.\n'
                 'Add the missing dependencies to --libs or use '
                 '`--ignore-missing="foo.so.1 bar.so etc.so"`.')
|
||||
def main() -> None:
    """CLI entry point: build the argument parser and run auto_patchelf."""
    arg_parser = argparse.ArgumentParser(
        prog="auto-patchelf",
        description='auto-patchelf tries as hard as possible to patch the'
                    ' provided binary files by looking for compatible'
                    'libraries in the provided paths.')
    arg_parser.add_argument(
        "--ignore-missing",
        nargs="*",
        type=str,
        help="Do not fail when some dependencies are not found.")
    arg_parser.add_argument(
        "--no-recurse",
        dest="recursive",
        action="store_false",
        help="Patch only the provided paths, and ignore their children")
    arg_parser.add_argument(
        "--paths", nargs="*", type=Path,
        help="Paths whose content needs to be patched.")
    arg_parser.add_argument(
        "--libs", nargs="*", type=Path,
        help="Paths where libraries are searched for.")
    arg_parser.add_argument(
        "--runtime-dependencies", nargs="*", type=Path,
        help="Paths to prepend to the runtime path of executable binaries.")

    print("automatically fixing dependencies for ELF files")
    parsed = arg_parser.parse_args()
    pprint.pprint(vars(parsed))

    auto_patchelf(
        parsed.paths,
        parsed.libs,
        parsed.runtime_dependencies,
        parsed.recursive,
        parsed.ignore_missing)
|
||||
# Target dynamic-linker properties, filled in from $NIX_BINTOOLS below
# before main() runs.  The None defaults are placeholders only (hence
# the `type: ignore` on each).
interpreter_path: Path = None  # type: ignore
interpreter_osabi: str = None  # type: ignore
interpreter_arch: str = None  # type: ignore
libc_lib: Path = None  # type: ignore

if __name__ == "__main__":
    nix_support = Path(os.environ['NIX_BINTOOLS']) / 'nix-support'
    # Path of the dynamic linker recorded by the bintools wrapper,
    # e.g. .../ld-linux-x86-64.so.2.
    interpreter_path = Path((nix_support / 'dynamic-linker').read_text().strip())
    libc_lib = Path((nix_support / 'orig-libc').read_text().strip()) / 'lib'

    # Derive the target arch/ABI from the interpreter itself.
    with open_elf(interpreter_path) as interpreter:
        interpreter_osabi = get_osabi(interpreter)
        interpreter_arch = get_arch(interpreter)

    if interpreter_arch and interpreter_osabi and interpreter_path and libc_lib:
        main()
    else:
        sys.exit("Failed to parse dynamic linker (ld) properties.")
92
pkgs/build-support/setup-hooks/auto-patchelf.sh
Normal file
92
pkgs/build-support/setup-hooks/auto-patchelf.sh
Normal file
|
|
@ -0,0 +1,92 @@
|
|||
#!/usr/bin/env bash
|
||||
|
||||
declare -a autoPatchelfLibs
declare -a extraAutoPatchelfLibs

# Env hook: remember each dependency's lib/ directory so that
# autoPatchelf can search it for shared objects later.
gatherLibraries() {
    autoPatchelfLibs+=("$1/lib")
}

# shellcheck disable=SC2154
# (targetOffset is referenced but not assigned.)
addEnvHooks "$targetOffset" gatherLibraries
|
||||
# Can be used to manually add additional directories with shared object files
|
||||
# to be included for the next autoPatchelf invocation.
|
||||
# Can be used to manually add additional directories with shared object files
# to be included for the next autoPatchelf invocation.
addAutoPatchelfSearchPath() {
    local -a findOpts=()

    # Parse leading flags; `--` or the first non-flag argument starts
    # the list of search paths.
    while [ $# -gt 0 ]; do
        case "$1" in
            --) shift; break;;
            --no-recurse) shift; findOpts+=("-maxdepth" 1);;
            --*)
                echo "addAutoPatchelfSearchPath: ERROR: Invalid command line" \
                     "argument: $1" >&2
                return 1;;
            *) break;;
        esac
    done

    # Collect the (deduplicated) parent directories of all *.so / *.so.*
    # files found below the given paths, NUL-delimited throughout.
    local libDir=
    while IFS= read -r -d '' libDir; do
        extraAutoPatchelfLibs+=("$libDir")
    done < <(find "$@" "${findOpts[@]}" \! -type d \
                  \( -name '*.so' -o -name '*.so.*' \) -print0 \
                  | sed -z 's#/[^/]*$##' \
                  | uniq -z
            )
}
||||
|
||||
|
||||
# Run the auto-patchelf python script over the given paths (or, via the
# postFixup hook below, over all outputs).  Accepts `--no-recurse` and
# `--` before the paths.
autoPatchelf() {
    local norecurse=
    while [ $# -gt 0 ]; do
        case "$1" in
            --) shift; break;;
            --no-recurse) shift; norecurse=1;;
            --*)
                echo "autoPatchelf: ERROR: Invalid command line" \
                     "argument: $1" >&2
                return 1;;
            *) break;;
        esac
    done

    # Intentionally unquoted: $autoPatchelfIgnoreMissingDeps is a
    # whitespace-separated list that must word-split into array elements.
    local ignoreMissingDepsArray=($autoPatchelfIgnoreMissingDeps)
    # Legacy boolean form: treat `autoPatchelfIgnoreMissingDeps = true;`
    # as "ignore everything".
    if [ "$autoPatchelfIgnoreMissingDeps" == "1" ]; then
        echo "autoPatchelf: WARNING: setting 'autoPatchelfIgnoreMissingDeps" \
             "= true;' is deprecated and will be removed in a future release." \
             "Use 'autoPatchelfIgnoreMissingDeps = [ \"*\" ];' instead." >&2
        ignoreMissingDepsArray=( "*" )
    fi

    # Likewise a whitespace-separated list, intentionally unquoted.
    local runtimeDependenciesArray=($runtimeDependencies)
    # @pythonInterpreter@ and @autoPatchelfScript@ are substituted when
    # this setup hook is installed.  `/lib` is appended to every runtime
    # dependency via the `${array[@]/%//lib}` expansion.
    @pythonInterpreter@ @autoPatchelfScript@ \
        ${norecurse:+--no-recurse} \
        --ignore-missing "${ignoreMissingDepsArray[@]}" \
        --paths "$@" \
        --libs "${autoPatchelfLibs[@]}" \
               "${extraAutoPatchelfLibs[@]}" \
        --runtime-dependencies "${runtimeDependenciesArray[@]/%//lib}"
}
||||
# XXX: This should ultimately use fixupOutputHooks but we currently don't have
# a way to enforce the order. If we have $runtimeDependencies set, the setup
# hook of patchelf is going to ruin everything and strip out those additional
# RPATHs.
#
# So what we do here is basically run in postFixup and emulate the same
# behaviour as fixupOutputHooks because the setup hook for patchelf is run in
# fixupOutput and the postFixup hook runs later.
#
# The hook runs autoPatchelf once over every existing output path,
# unless $dontAutoPatchelf disables it.
#
# shellcheck disable=SC2016
# (Expressions don't expand in single quotes, use double quotes for that.)
postFixupHooks+=('
    if [ -z "${dontAutoPatchelf-}" ]; then
        autoPatchelf -- $(for output in $outputs; do
                              [ -e "${!output}" ] || continue
                              echo "${!output}"
                          done)
    fi
')
7
pkgs/build-support/setup-hooks/autoreconf.sh
Normal file
7
pkgs/build-support/setup-hooks/autoreconf.sh
Normal file
|
|
@ -0,0 +1,7 @@
|
|||
preConfigurePhases="${preConfigurePhases:-} autoreconfPhase"

# Regenerate the autotools build system before configure runs.
# $autoreconfFlags overrides the default flags.
autoreconfPhase() {
    runHook preAutoreconf
    local flags=${autoreconfFlags:---install --force --verbose}
    # shellcheck disable=SC2086
    # (flags are intentionally word-split into separate arguments)
    autoreconf $flags
    runHook postAutoreconf
}
9
pkgs/build-support/setup-hooks/breakpoint-hook.sh
Normal file
9
pkgs/build-support/setup-hooks/breakpoint-hook.sh
Normal file
|
|
@ -0,0 +1,9 @@
|
|||
# Failure hook: print how to attach to the failed build sandbox with
# cntr, then block forever so the sandbox stays alive for inspection.
breakpointHook() {
    local red='\033[0;31m'
    local no_color='\033[0m'

    # curPhase/exitCode/out are provided by the stdenv caller of
    # failureHooks.
    echo -e "${red}build failed in ${curPhase} with exit code ${exitCode}${no_color}"
    printf "To attach install cntr and run the following command as root:\n\n"
    # Deliberately never returns: the infinite sleep keeps the builder
    # process (and thus the sandbox) alive.
    sh -c "echo ' cntr attach -t command cntr-${out}'; while true; do sleep 99999999; done"
}
failureHooks+=(breakpointHook)
17
pkgs/build-support/setup-hooks/canonicalize-jars.sh
Normal file
17
pkgs/build-support/setup-hooks/canonicalize-jars.sh
Normal file
|
|
@ -0,0 +1,17 @@
|
|||
# This setup hook causes the fixup phase to repack all JAR files in a
# canonical & deterministic fashion, e.g. resetting mtimes (like with normal
# store files) and avoiding impure metadata.

fixupOutputHooks+=('if [ -z "$dontCanonicalizeJars" -a -e "$prefix" ]; then canonicalizeJarsIn "$prefix"; fi')

# Run canonicalizeJar (provided by the sourced script below) on every
# *.jar found under the given directory.
canonicalizeJarsIn() {
    local dir="$1"
    header "canonicalizing jars in $dir"
    dir="$(realpath -sm -- "$dir")"
    local jarFile
    while IFS= read -rd '' jarFile; do
        canonicalizeJar "$jarFile"
    done < <(find -- "$dir" -type f -name '*.jar' -print0)
    stopNest
}

source @canonicalize_jar@
33
pkgs/build-support/setup-hooks/compress-man-pages.sh
Normal file
33
pkgs/build-support/setup-hooks/compress-man-pages.sh
Normal file
|
|
@ -0,0 +1,33 @@
|
|||
fixupOutputHooks+=('if [ -z "${dontGzipMan-}" ]; then compressManPages "$prefix"; fi')

# Gzip every uncompressed man page under $1/share/man and re-point
# symlinked pages at the compressed files.
compressManPages() {
    local dir="$1"

    # Skip when share/ or share/man/ is a symlink (likely into another
    # output) or there are no man pages at all.
    if [ -L "$dir"/share ] || [ -L "$dir"/share/man ] || [ ! -d "$dir/share/man" ]
    then return
    fi
    echo "gzipping man pages under $dir/share/man/"

    # Compress all uncompressed manpages. Don't follow symlinks, etc.
    local page
    find "$dir"/share/man/ -type f -a '!' -regex '.*\.\(bz2\|gz\|xz\)$' -print0 \
        | while IFS= read -r -d $'\0' page
    do
        # -n omits the timestamp for reproducibility; on failure keep
        # the original and drop the partial .gz.
        if gzip -c -n "$page" > "$page".gz; then
            rm "$page"
        else
            rm "$page".gz
        fi
    done

    # Point symlinks to compressed manpages.
    local link
    find "$dir"/share/man/ -type l -a '!' -regex '.*\.\(bz2\|gz\|xz\)$' -print0 \
        | sort -z \
        | while IFS= read -r -d $'\0' link
    do
        local linkTarget
        linkTarget="$(readlink -f "$link")"
        if [ -f "$linkTarget".gz ]; then
            ln -sf "$linkTarget".gz "$link".gz && rm "$link"
        fi
    done
}
43
pkgs/build-support/setup-hooks/copy-desktop-items.sh
Normal file
43
pkgs/build-support/setup-hooks/copy-desktop-items.sh
Normal file
|
|
@ -0,0 +1,43 @@
|
|||
# shellcheck shell=bash

# Setup hook that installs specified desktop items.
#
# Example usage in a derivation:
#
#   { …, makeDesktopItem, copyDesktopItems, … }:
#
#   let desktopItem = makeDesktopItem { … }; in
#   stdenv.mkDerivation {
#     …
#     nativeBuildInputs = [ copyDesktopItems ];
#
#     desktopItems = [ desktopItem ];
#     …
#   }
#
# This hook will copy files which are either given by full path
# or all '*.desktop' files placed inside the 'share/applications'
# folder of each `desktopItems` argument.

postInstallHooks+=(copyDesktopItems)

copyDesktopItems() {
    if [ "${dontCopyDesktopItems-}" = 1 ]; then return; fi

    # ${desktopItems-} (not $desktopItems) so the hook is a no-op
    # rather than an "unbound variable" error under `set -u` when the
    # derivation sets no desktopItems.
    if [ -z "${desktopItems-}" ]; then
        return
    fi

    local applications="${!outputBin}/share/applications"
    # Intentionally unquoted: desktopItems is a whitespace-separated list.
    for desktopItem in $desktopItems; do
        if [[ -f "$desktopItem" ]]; then
            echo "Copying '$desktopItem' into '${applications}'"
            install -D -m 444 -t "${applications}" "$desktopItem"
        else
            for f in "$desktopItem"/share/applications/*.desktop; do
                echo "Copying '$f' into '${applications}'"
                install -D -m 444 -t "${applications}" "$f"
            done
        fi
    done
}
219
pkgs/build-support/setup-hooks/desktop-to-darwin-bundle.sh
Normal file
219
pkgs/build-support/setup-hooks/desktop-to-darwin-bundle.sh
Normal file
|
|
@ -0,0 +1,219 @@
|
|||
#!/usr/bin/env bash
fixupOutputHooks+=('convertDesktopFiles $prefix')

# Get a param out of a desktop file. First parameter is the file and the second
# is a pattern of the key who's value we should fetch.
getDesktopParam() {
    local file="$1"
    local pattern="$2"

    # Print everything right of the first '=' on each matching line.
    awk -F "=" "/${pattern}/ {print \$2}" "${file}"
}
||||
|
||||
# Convert a freedesktop.org icon theme for a given app to a .icns file. When possible, missing
# icons are synthesized from SVG or rescaled from existing ones (when within the size threshold).
convertIconTheme() {
    local -r out=$1        # destination Resources directory
    local -r sharePath=$2  # the package's share/ directory
    local -r iconName=$3   # icon name, e.g. "apps/foo"
    local -r theme=${4:-hicolor}

    # Sizes based on archived Apple documentation:
    # https://developer.apple.com/design/human-interface-guidelines/macos/icons-and-images/app-icon#app-icon-sizes
    local -ra iconSizes=(16 32 128 256 512)
    local -ra scales=([1]="" [2]="@2")

    # Based loosely on the algorithm at:
    # https://specifications.freedesktop.org/icon-theme-spec/icon-theme-spec-latest.html#icon_lookup
    # Assumes threshold = 2 for ease of implementation.
    #
    # Prints "<kind> <path>" where kind is fixed/threshold/fallback, or
    # just "scalable" when no raster candidate matched.
    function findIcon() {
        local -r iconSize=$1
        local -r scale=$2

        local scaleSuffix=${scales[$scale]}
        local exactSize=${iconSize}x${iconSize}${scaleSuffix}

        # Exact size first, then sizes within the +/-2 threshold.
        local -a validSizes=(
            ${exactSize}
            $((iconSize + 1))x$((iconSize + 1))${scaleSuffix}
            $((iconSize + 2))x$((iconSize + 2))${scaleSuffix}
            $((iconSize - 1))x$((iconSize - 1))${scaleSuffix}
            $((iconSize - 2))x$((iconSize - 2))${scaleSuffix}
        )

        # candidateIcons comes from the enclosing getIcons scope.
        for iconIndex in "${!candidateIcons[@]}"; do
            for maybeSize in "${validSizes[@]}"; do
                icon=${candidateIcons[$iconIndex]}
                if [[ $icon = */$maybeSize/* ]]; then
                    if [[ $maybeSize = $exactSize ]]; then
                        echo "fixed $icon"
                    else
                        echo "threshold $icon"
                    fi
                elif [[ -a $icon ]]; then
                    echo "fallback $icon"
                fi
                # NOTE(review): this `return 0` sits inside the inner
                # loop, so only the first candidate/size pair is ever
                # inspected and the loops never advance — looks
                # unintended; confirm against the upstream hook.
                return 0
            done
        done
        echo "scalable"
    }

    # Rescale an existing raster icon to the requested size/scale.
    function resizeIcon() {
        local -r in=$1
        local -r out=$2
        local -r iconSize=$3
        local -r scale=$4

        local density=$((72 * scale))x$((72 * scale))
        local dim=$((iconSize * scale))

        magick convert -scale "${dim}x${dim}" -density "$density" -units PixelsPerInch "$in" "$out"
    }

    # Render an SVG to a raster icon of the requested size/scale.
    # Fails (returns 1) when no scalable icon exists ($in is '-').
    function synthesizeIcon() {
        local -r in=$1
        local -r out=$2
        local -r iconSize=$3
        local -r scale=$4

        if [[ $in != '-' ]]; then
            local density=$((72 * scale))x$((72 * scale))
            local dim=$((iconSize * scale))
            rsvg-convert --keep-aspect-ratio --width "$dim" --height "$dim" "$in" --output "$out"
            magick convert -density "$density" -units PixelsPerInch "$out" "$out"
        else
            return 1
        fi
    }

    # Produce (and print the path of) a temp directory filled with PNGs
    # for every icon size/scale we could find or synthesize.
    function getIcons() {
        local -r sharePath=$1
        local -r iconname=$2
        local -r theme=$3
        local -r resultdir=$(mktemp -d)

        local -ar candidateIcons=(
            "${sharePath}/icons/${theme}/"*"/${iconname}.png"
            "${sharePath}/icons/${theme}/"*"/${iconname}.xpm"
        )

        local -a scalableIcon=("${sharePath}/icons/${theme}/scalable/${iconname}.svg"*)
        if [[ ${#scalableIcon[@]} = 0 ]]; then
            scalableIcon=('-')
        fi

        # Tri-state variable, NONE means no icons have been found, an empty
        # icns file will be generated, not sure that's necessary because macOS
        # will default to a generic icon if no icon can be found.
        #
        # OTHER means an appropriate icon was found.
        #
        # Any other value is a path to an icon file that isn't scalable or
        # within the threshold. This is used as a fallback in case no better
        # icon can be found and will be scaled as much as
        # necessary to result in appropriate icon sizes.
        local foundIcon=NONE
        for iconSize in "${iconSizes[@]}"; do
            for scale in "${!scales[@]}"; do
                local iconResult=$(findIcon $iconSize $scale)
                local type=${iconResult%% *}
                local icon=${iconResult#* }
                local scaleSuffix=${scales[$scale]}
                local result=${resultdir}/${iconSize}x${iconSize}${scales[$scale]}${scaleSuffix:+x}.png
                case $type in
                    fixed)
                        # Exact size available: just stamp the density.
                        local density=$((72 * scale))x$((72 * scale))
                        magick convert -density "$density" -units PixelsPerInch "$icon" "$result"
                        foundIcon=OTHER
                        ;;
                    threshold)
                        # Synthesize an icon of the exact size if a scalable icon is available
                        # instead of scaling one and ending up with a fuzzy icon.
                        if ! synthesizeIcon "${scalableIcon[0]}" "$result" "$iconSize" "$scale"; then
                            resizeIcon "$icon" "$result" "$iconSize" "$scale"
                        fi
                        foundIcon=OTHER
                        ;;
                    scalable)
                        synthesizeIcon "${scalableIcon[0]}" "$result" "$iconSize" "$scale" || true
                        foundIcon=OTHER
                        ;;
                    fallback)
                        # Use the largest size available to scale to
                        # appropriate sizes.
                        if [[ $foundIcon != OTHER ]]; then
                            foundIcon=$icon
                        fi
                        ;;
                    *)
                        ;;
                esac
            done
        done
        # Only a fallback icon was found: rescale it to every size.
        if [[ $foundIcon != NONE && $foundIcon != OTHER ]]; then
            # Ideally we'd only resize to whatever the closest sizes are,
            # starting from whatever icon sizes are available.
            for iconSize in 16 32 128 256 512; do
                local result=${resultdir}/${iconSize}x${iconSize}.png
                resizeIcon "$foundIcon" "$result" "$iconSize" 1
            done
        fi
        echo "$resultdir"
    }

    iconsdir=$(getIcons "$sharePath" "apps/${iconName}" "$theme")
    if [[ -n "$(ls -A1 "$iconsdir")" ]]; then
        icnsutil compose --toc "$out/${iconName}.icns" "$iconsdir/"*
    else
        echo "Warning: no icons were found. Creating an empty icon for ${iconName}.icns."
        touch "$out/${iconName}.icns"
    fi
}
||||
# Rewrite desktop-entry Exec field codes (%k, %c, %i, %f/%u, %F/%U)
# into concrete values, since .app bundles cannot interpret them.
processExecFieldCodes() {
    local -r file=$1
    local -r execRaw=$(getDesktopParam "${file}" "Exec")
    # %k -> path of the desktop file itself
    local -r execNoK="${execRaw/\%k/${file}}"
    # %c -> the entry's Name value
    local -r execNoKC="${execNoK/\%c/$(getDesktopParam "${file}" "Name")}"
    local -r icon=$(getDesktopParam "${file}" "Icon")
    # %i -> "--icon <icon>" when an icon is set, empty otherwise
    local -r execNoKCI="${execNoKC/\%i/${icon:+--icon }${icon}}"
    # %f/%u -> first argument; %F/%U -> all arguments
    local -r execNoKCIfu="${execNoKCI/\%[fu]/\$1}"
    local -r exec="${execNoKCIfu/\%[FU]/\$@}"
    if [[ "$exec" != "$execRaw" ]]; then
        echo 1>&2 "desktopToDarwinBundle: Application bundles do not understand desktop entry field codes. Changed '$execRaw' to '$exec'."
    fi
    echo "$exec"
}
||||
|
||||
# For a given .desktop file, generate a darwin '.app' bundle for it.
convertDesktopFile() {
    local -r file=$1
    # share/ directory two levels above share/applications/foo.desktop.
    local -r sharePath=$(dirname "$(dirname "$file")")
    local -r name=$(getDesktopParam "${file}" "^Name")
    # An explicit X-macOS-Exec entry overrides the rewritten Exec line.
    local -r macOSExec=$(getDesktopParam "${file}" "X-macOS-Exec")
    if [[ "$macOSExec" ]]; then
        local -r exec="$macOSExec"
    else
        local -r exec=$(processExecFieldCodes "${file}")
    fi
    local -r iconName=$(getDesktopParam "${file}" "^Icon")
    local -r squircle=$(getDesktopParam "${file}" "X-macOS-SquircleIcon")

    mkdir -p "${!outputBin}/Applications/${name}.app/Contents/MacOS"
    mkdir -p "${!outputBin}/Applications/${name}.app/Contents/Resources"

    # Build the .icns icon set next to the bundle's resources.
    convertIconTheme "${!outputBin}/Applications/${name}.app/Contents/Resources" "$sharePath" "$iconName"

    write-darwin-bundle "${!outputBin}" "$name" "$exec" "$iconName" "$squircle"
}
||||
|
||||
# For every .desktop file in the output's share/applications directory,
# generate a darwin '.app' bundle.
convertDesktopFiles() {
    local dir="$1/share/applications/"

    if [ -d "${dir}" ]; then
        # NUL-delimited find instead of word-splitting `$(find ...)`,
        # so desktop files whose paths contain whitespace are handled
        # as single arguments.
        local desktopFile
        while IFS= read -r -d '' desktopFile; do
            convertDesktopFile "$desktopFile"
        done < <(find "$dir" -iname "*.desktop" -print0)
    fi
}
21
pkgs/build-support/setup-hooks/die.sh
Normal file
21
pkgs/build-support/setup-hooks/die.sh
Normal file
|
|
@ -0,0 +1,21 @@
|
|||
# Exit the build with an error message and a backtrace.
#
# Usage: die "Error message"
die() {
    # Be tolerant of failures from here on: the final `caller` invocation
    # below returns non-zero and would otherwise kill the script early
    # under `set -e`, truncating the backtrace.
    set +e

    # Report the message, then walk the call stack.
    printf "\nBuilder called die: %b\n" "$*"
    printf "Backtrace:\n"

    local depth=0
    while caller "$depth"; do
        depth=$((depth + 1))
    done
    printf "\n"

    exit 1
}
|
||||
|
|
@ -0,0 +1,20 @@
|
|||
postPhases+=" cleanupBuildDir"

# Force GCC to build with coverage instrumentation. Also disable
# optimisation, since it may confuse things.
export NIX_CFLAGS_COMPILE="${NIX_CFLAGS_COMPILE:-} -O0 --coverage"

# Get rid of everything that isn't a gcno file or a C source file.
# Also strip the `.tmp_' prefix from gcno files. (The Linux kernel
# creates these.)
#
# Fixed: $out is now quoted and all find output is NUL-delimited so paths
# containing whitespace survive; the sed-based rename is replaced with a
# parameter expansion (same first-occurrence semantics).
cleanupBuildDir() {
    if ! [ -e "$out/.build" ]; then return; fi

    find "$out/.build/" -type f -a ! \
        \( -name "*.c" -o -name "*.cc" -o -name "*.cpp" -o -name "*.h" -o -name "*.hh" -o -name "*.y" -o -name "*.l" -o -name "*.gcno" \) \
        -print0 | xargs -0 rm -f --

    local i
    while IFS= read -r -d '' i; do
        mv "$i" "${i/.tmp_/}"
    done < <(find "$out/.build/" -name ".tmp_*.gcno" -print0)
}
|
||||
22
pkgs/build-support/setup-hooks/find-xml-catalogs.sh
Normal file
22
pkgs/build-support/setup-hooks/find-xml-catalogs.sh
Normal file
|
|
@ -0,0 +1,22 @@
|
|||
# addXMLCatalogs PKG
# Append every catalog.xml found under PKG's catalog directories to
# XML_CATALOG_FILES. Paths are quoted and NUL-delimited so directories
# with whitespace do not break the scan (previously `$(find …)` split).
addXMLCatalogs () {
    local d i
    # ‘xml/dtd’ and ‘xml/xsl’ are deprecated. Catalogs should be
    # installed underneath ‘share/xml’.
    for d in "$1/share/xml" "$1/xml/dtd" "$1/xml/xsl"; do
        if [ -d "$d" ]; then
            while IFS= read -r -d '' i; do
                XML_CATALOG_FILES+=" $i"
            done < <(find "$d" -name catalog.xml -print0)
        fi
    done
}
|
||||
|
||||
# Run the hook setup only once, even if this file is sourced several times
# (e.g. the package appears in multiple dependency sets).
if [ -z "${libxmlHookDone-}" ]; then
    libxmlHookDone=1

    # Set up XML_CATALOG_FILES. An empty initial value prevents
    # xmllint and xsltproc from looking in /etc/xml/catalog.
    export XML_CATALOG_FILES=''
    # NOTE(review): XML_CATALOG_FILES was just set to '', so this branch
    # always fires and leaves a single space — presumably deliberate, since
    # a non-empty value is what disables the /etc/xml/catalog fallback.
    if [ -z "$XML_CATALOG_FILES" ]; then XML_CATALOG_FILES=" "; fi
    addEnvHooks "$hostOffset" addXMLCatalogs
fi
|
||||
40
pkgs/build-support/setup-hooks/fix-darwin-dylib-names.sh
Normal file
40
pkgs/build-support/setup-hooks/fix-darwin-dylib-names.sh
Normal file
|
|
@ -0,0 +1,40 @@
|
|||
# On macOS, binaries refer to dynamic library dependencies using
|
||||
# either relative paths (e.g. "libicudata.dylib", searched relative to
|
||||
# $DYLD_LIBRARY_PATH) or absolute paths
|
||||
# (e.g. "/nix/store/.../lib/libicudata.dylib"). In Nix, the latter is
|
||||
# preferred since it allows programs to just work. When linking
|
||||
# against a library (e.g. "-licudata"), the linker uses the install
|
||||
# name embedded in the dylib (which can be shown using "otool -D").
|
||||
# Most packages create dylibs with absolute install names, but some do
|
||||
# not. This setup hook fixes dylibs by setting their install names to
|
||||
# their absolute path (using "install_name_tool -id"). It also
|
||||
# rewrites references in other dylibs to absolute paths.
|
||||
|
||||
fixupOutputHooks+=('fixDarwinDylibNamesIn $prefix')

# fixDarwinDylibNames DYLIB...
# Set each dylib's install name (its "-id") to its absolute path and
# rewrite references between the given dylibs to absolute paths.
fixDarwinDylibNames() {
    local flags=()
    local fn int_out result

    for fn in "$@"; do
        flags+=(-change "$(basename "$fn")" "$fn")
    done

    for fn in "$@"; do
        if [ -L "$fn" ]; then continue; fi
        echo "$fn: fixing dylib"
        int_out=$(@targetPrefix@install_name_tool -id "$fn" "${flags[@]}" "$fn" 2>&1)
        result=$?
        # Bug fix: the stub-file check previously grepped "$out" (the Nix
        # output path) instead of the captured tool output "$int_out", so
        # stub libraries always aborted the build; also use -q so the
        # matched line is not echoed.
        if [ "$result" -ne 0 ] &&
            ! grep -q "shared library stub file and can't be changed" <<< "$int_out"
        then
            echo "$int_out" >&2
            exit "$result"
        fi
    done
}
|
||||
|
||||
# fixDarwinDylibNamesIn DIR
# Run fixDarwinDylibNames over every *.dylib beneath DIR. Collects the
# paths NUL-delimited so dylibs in directories with whitespace are passed
# intact (previously an unquoted `$(find …)` word-split them).
fixDarwinDylibNamesIn() {
    local dir="$1"
    local lib
    local dylibs=()
    while IFS= read -r -d '' lib; do
        dylibs+=("$lib")
    done < <(find "$dir" -name '*.dylib' -print0)
    if [ "${#dylibs[@]}" -gt 0 ]; then
        fixDarwinDylibNames "${dylibs[@]}"
    fi
}
|
||||
11
pkgs/build-support/setup-hooks/gog-unpack.sh
Normal file
11
pkgs/build-support/setup-hooks/gog-unpack.sh
Normal file
|
|
@ -0,0 +1,11 @@
|
|||
# Replace the generic unpackPhase: GOG installers are InnoSetup
# executables, not archives tar can handle.
unpackPhase="unpackGog"

unpackGog() {
    runHook preUnpackGog

    # --exclude-temp skips the installer's temporary payload.
    innoextract --silent --extract --exclude-temp "${src}"

    # Lowercase every extracted path. -depth renames children before their
    # parent directories, so paths remain valid during the walk.
    find . -depth -print -execdir rename -f 'y/A-Z/a-z/' '{}' \;

    runHook postUnpackGog
}
|
||||
230
pkgs/build-support/setup-hooks/install-shell-files.sh
Normal file
230
pkgs/build-support/setup-hooks/install-shell-files.sh
Normal file
|
|
@ -0,0 +1,230 @@
|
|||
# shellcheck shell=bash
|
||||
# Setup hook for the `installShellFiles` package.
|
||||
#
|
||||
# Example usage in a derivation:
|
||||
#
|
||||
# { …, installShellFiles, … }:
|
||||
# stdenv.mkDerivation {
|
||||
# …
|
||||
# nativeBuildInputs = [ installShellFiles ];
|
||||
# postInstall = ''
|
||||
# installManPage share/doc/foobar.1
|
||||
# installShellCompletion share/completions/foobar.{bash,fish,zsh}
|
||||
# '';
|
||||
# …
|
||||
# }
|
||||
#
|
||||
# See comments on each function for more details.
|
||||
|
||||
# installManPage <path> [...<path>]
#
# Install each argument into share/man/man<n>/, where <n> is taken from the
# path's man-section suffix (an optional .gz compression suffix is ignored).
# Section 3 pages go to the devman output, everything else to the man
# output. Fails if a path is empty or lacks a section suffix.
installManPage() {
    local page
    for page in "$@"; do
        if (( "${NIX_DEBUG:-0}" >= 1 )); then
            echo "installManPage: installing $page"
        fi
        if [[ -z "$page" ]]; then
            echo "installManPage: error: path cannot be empty" >&2
            return 1
        fi
        local base
        base=$(stripHash "$page") # use stripHash in case it's a nix store path
        local uncompressed=${base%.gz} # don't get fooled by compressed manpages
        local section=${uncompressed##*.}
        if [[ -z "$section" || "$section" == "$uncompressed" ]]; then
            echo "installManPage: error: path missing manpage section suffix: $page" >&2
            return 1
        fi
        # Section 3 (library calls) belongs in the development output.
        local destRoot
        case "$section" in
            3) destRoot=${!outputDevman:?} ;;
            *) destRoot=${!outputMan:?} ;;
        esac
        install -Dm644 -T "$page" "$destRoot/share/man/man$section/$base" || return
    done
}
|
||||
|
||||
# installShellCompletion [--cmd <name>] ([--bash|--fish|--zsh] [--name <name>] <path>)...
|
||||
#
|
||||
# Each path is installed into the appropriate directory for shell completions for the given shell.
|
||||
# If one of `--bash`, `--fish`, or `--zsh` is given the path is assumed to belong to that shell.
|
||||
# Otherwise the file extension will be examined to pick a shell. If the shell is unknown a warning
|
||||
# will be logged and the command will return a non-zero status code after processing any remaining
|
||||
# paths. Any of the shell flags will affect all subsequent paths (unless another shell flag is
|
||||
# given).
|
||||
#
|
||||
# If the shell completion needs to be renamed before installing the optional `--name <name>` flag
|
||||
# may be given. Any name provided with this flag only applies to the next path.
|
||||
#
|
||||
# If all shell completions need to be renamed before installing the optional `--cmd <name>` flag
|
||||
# may be given. This will synthesize a name for each file, unless overridden with an explicit
|
||||
# `--name` flag. For example, `--cmd foobar` will synthesize the name `_foobar` for zsh and
|
||||
# `foobar.bash` for bash.
|
||||
#
|
||||
# For zsh completions, if the `--name` flag is not given, the path will be automatically renamed
|
||||
# such that `foobar.zsh` becomes `_foobar`.
|
||||
#
|
||||
# A path may be a named fd, such as produced by the bash construct `<(cmd)`. When using a named fd,
|
||||
# the shell type flag must be provided, and either the `--name` or `--cmd` flag must be provided.
|
||||
# This might look something like:
|
||||
#
|
||||
# installShellCompletion --zsh --name _foobar <($out/bin/foobar --zsh-completion)
|
||||
#
|
||||
# This command accepts multiple shell flags in conjunction with multiple paths if you wish to
|
||||
# install them all in one command:
|
||||
#
|
||||
# installShellCompletion share/completions/foobar.{bash,fish} --zsh share/completions/_foobar
|
||||
#
|
||||
# However it may be easier to read if each shell is split into its own invocation, especially when
|
||||
# renaming is involved:
|
||||
#
|
||||
# installShellCompletion --bash --name foobar.bash share/completions.bash
|
||||
# installShellCompletion --fish --name foobar.fish share/completions.fish
|
||||
# installShellCompletion --zsh --name _foobar share/completions.zsh
|
||||
#
|
||||
# Or to use shell newline escaping to split a single invocation across multiple lines:
|
||||
#
|
||||
# installShellCompletion --cmd foobar \
|
||||
# --bash <($out/bin/foobar --bash-completion) \
|
||||
# --fish <($out/bin/foobar --fish-completion) \
|
||||
# --zsh <($out/bin/foobar --zsh-completion)
|
||||
#
|
||||
# If any argument is `--` the remaining arguments will be treated as paths.
|
||||
# installShellCompletion [--cmd <name>] ([--bash|--fish|--zsh] [--name <name>] <path>)...
# See the comment block above for the full contract. Returns 1 on usage
# errors, 2 (after processing remaining paths) if any path's shell could
# not be determined.
installShellCompletion() {
    local shell='' name='' cmdname='' retval=0 parseArgs=1 arg
    # `{ arg=$1; shift; }` fails once positionals run out, ending the loop.
    while { arg=$1; shift; }; do
        # Parse arguments
        if (( parseArgs )); then
            case "$arg" in
            --bash|--fish|--zsh)
                shell=${arg#--}
                continue;;
            --name)
                name=$1
                shift || {
                    echo 'installShellCompletion: error: --name flag expected an argument' >&2
                    return 1
                }
                continue;;
            --name=*)
                # treat `--name=foo` the same as `--name foo`
                name=${arg#--name=}
                continue;;
            --cmd)
                cmdname=$1
                shift || {
                    echo 'installShellCompletion: error: --cmd flag expected an argument' >&2
                    return 1
                }
                continue;;
            --cmd=*)
                # treat `--cmd=foo` the same as `--cmd foo`
                cmdname=${arg#--cmd=}
                continue;;
            --?*)
                echo "installShellCompletion: warning: unknown flag ${arg%%=*}" >&2
                retval=2
                continue;;
            --)
                # treat remaining args as paths
                parseArgs=0
                continue;;
            esac
        fi
        if (( "${NIX_DEBUG:-0}" >= 1 )); then
            echo "installShellCompletion: installing $arg${name:+ as $name}"
        fi
        # if we get here, this is a path or named pipe
        # Identify shell and output name
        local curShell=$shell
        local outName=''
        if [[ -z "$arg" ]]; then
            echo "installShellCompletion: error: empty path is not allowed" >&2
            return 1
        elif [[ -p "$arg" ]]; then
            # this is a named fd or fifo: the shell and target name cannot
            # be inferred, both must come from flags
            if [[ -z "$curShell" ]]; then
                echo "installShellCompletion: error: named pipe requires one of --bash, --fish, or --zsh" >&2
                return 1
            elif [[ -z "$name" && -z "$cmdname" ]]; then
                echo "installShellCompletion: error: named pipe requires one of --cmd or --name" >&2
                return 1
            fi
        else
            # this is a path
            local argbase
            argbase=$(stripHash "$arg")
            if [[ -z "$curShell" ]]; then
                # auto-detect the shell from the file extension
                case "$argbase" in
                ?*.bash) curShell=bash;;
                ?*.fish) curShell=fish;;
                ?*.zsh) curShell=zsh;;
                *)
                    if [[ "$argbase" = _* && "$argbase" != *.* ]]; then
                        # probably zsh (zsh completions conventionally
                        # start with `_` and have no extension)
                        echo "installShellCompletion: warning: assuming path \`$arg' is zsh; please specify with --zsh" >&2
                        curShell=zsh
                    else
                        echo "installShellCompletion: warning: unknown shell for path: $arg" >&2
                        retval=2
                        continue
                    fi;;
                esac
            fi
            outName=$argbase
        fi
        # Identify output path: explicit --name wins, then --cmd synthesis,
        # else the (possibly auto-renamed) basename computed above
        if [[ -n "$name" ]]; then
            outName=$name
        elif [[ -n "$cmdname" ]]; then
            case "$curShell" in
            bash|fish) outName=$cmdname.$curShell;;
            zsh) outName=_$cmdname;;
            *)
                # Our list of shells is out of sync with the flags we accept or extensions we detect.
                echo 'installShellCompletion: internal error' >&2
                return 1;;
            esac
        fi
        local sharePath
        case "$curShell" in
        bash) sharePath=bash-completion/completions;;
        fish) sharePath=fish/vendor_completions.d;;
        zsh)
            sharePath=zsh/site-functions
            # only apply automatic renaming if we didn't have a manual rename
            if [[ -z "$name" && -z "$cmdname" ]]; then
                # convert a name like `foo.zsh` into `_foo`
                outName=${outName%.zsh}
                outName=_${outName#_}
            fi;;
        *)
            # Our list of shells is out of sync with the flags we accept or extensions we detect.
            echo 'installShellCompletion: internal error' >&2
            return 1;;
        esac
        # Install file
        local outDir="${!outputBin:?}/share/$sharePath"
        local outPath="$outDir/$outName"
        if [[ -p "$arg" ]]; then
            # install handles named pipes on NixOS but not on macOS
            mkdir -p "$outDir" \
                && cat "$arg" > "$outPath"
        else
            install -Dm644 -T "$arg" "$outPath"
        fi || return
        # Clear the per-path flags (--name only applies to the next path)
        name=
    done
    if [[ -n "$name" ]]; then
        echo 'installShellCompletion: error: --name flag given with no path' >&2
        return 1
    fi
    return $retval
}
|
||||
6
pkgs/build-support/setup-hooks/keep-build-tree.sh
Normal file
6
pkgs/build-support/setup-hooks/keep-build-tree.sh
Normal file
|
|
@ -0,0 +1,6 @@
|
|||
prePhases+=" moveBuildDir"

# Run the build inside $out/.build so the whole build tree is retained in
# the output (used by coverage/analysis hooks). $out is now quoted so
# store paths containing unusual characters don't word-split.
moveBuildDir() {
    mkdir -p "$out/.build"
    cd "$out/.build"
}
|
||||
5
pkgs/build-support/setup-hooks/ld-is-cc-hook.sh
Normal file
5
pkgs/build-support/setup-hooks/ld-is-cc-hook.sh
Normal file
|
|
@ -0,0 +1,5 @@
|
|||
# Point $LD at the C compiler driver. Some build systems invoke $LD
# directly and expect it to accept compiler-style flags.
ld-is-cc-hook() {
    LD="$CC"
}

preConfigureHooks+=(ld-is-cc-hook)
|
||||
|
|
@ -0,0 +1,27 @@
|
|||
# Builds the makeBinaryWrapper setup hook: substitutes the target compiler
# (plus any -fsanitize flags) into make-binary-wrapper.sh and exposes the
# docstring-extraction helper and tests via passthru.
{ stdenv
, targetPackages
, lib
, makeSetupHook
, dieHook
, writeShellScript
, tests
, cc ? targetPackages.stdenv.cc
, sanitizers ? []
}:

makeSetupHook {
  deps = [ dieHook ]
    # https://github.com/NixOS/nixpkgs/issues/148189
    ++ lib.optional (stdenv.isDarwin && stdenv.isAarch64) cc;

  substitutions = {
    # Replaces @cc@ in make-binary-wrapper.sh.
    cc = "${cc}/bin/${cc.targetPrefix}cc ${lib.escapeShellArgs (map (s: "-fsanitize=${s}") sanitizers)}";

    # Extract the function call used to create a binary wrapper from its embedded docstring
    # NOTE(review): passthru.* nested inside `substitutions` mirrors the
    # original source; confirm against makeSetupHook's expected arguments.
    passthru.extractCmd = writeShellScript "extract-binary-wrapper-cmd" ''
      strings -dw "$1" | sed -n '/^makeCWrapper/,/^$/ p'
    '';

    passthru.tests = tests.makeBinaryWrapper;
  };
} ./make-binary-wrapper.sh
|
||||
|
|
@ -0,0 +1,394 @@
|
|||
|
||||
set -euo pipefail

# Assert that FILE exists and is executable
#
# assertExecutable FILE
assertExecutable() {
    local target="$1"
    if [[ ! -f "$target" || ! -x "$target" ]]; then
        die "Cannot wrap '$target' because it is not an executable file"
    fi
}
|
||||
|
||||
# Generate a binary executable wrapper for wrapping an executable.
|
||||
# The binary is compiled from generated C-code using gcc.
|
||||
# makeWrapper EXECUTABLE OUT_PATH ARGS
|
||||
|
||||
# ARGS:
|
||||
# --argv0 NAME : set the name of the executed process to NAME
|
||||
# (if unset or empty, defaults to EXECUTABLE)
|
||||
# --inherit-argv0 : the executable inherits argv0 from the wrapper.
|
||||
# (use instead of --argv0 '$0')
|
||||
# --set VAR VAL : add VAR with value VAL to the executable's environment
|
||||
# --set-default VAR VAL : like --set, but only adds VAR if not already set in
|
||||
# the environment
|
||||
# --unset VAR : remove VAR from the environment
|
||||
# --chdir DIR : change working directory (use instead of --run "cd DIR")
|
||||
# --add-flags FLAGS : add FLAGS to invocation of executable
|
||||
# TODO(@ncfavier): --append-flags
|
||||
|
||||
# --prefix ENV SEP VAL : suffix/prefix ENV with VAL, separated by SEP
|
||||
# --suffix
|
||||
|
||||
# To troubleshoot a binary wrapper after you compiled it,
|
||||
# use the `strings` command or open the binary file in a text editor.
|
||||
# makeWrapper / makeBinaryWrapper EXECUTABLE OUT_PATH ARGS
# Generate the wrapper's C source and compile it to OUT_PATH.
makeWrapper() { makeBinaryWrapper "$@"; }
makeBinaryWrapper() {
    # Blank the flag variables so compiler flags from the surrounding
    # build do not leak into the wrapper compilation.
    local NIX_CFLAGS_COMPILE= NIX_CFLAGS_LINK=
    local original="$1"
    local wrapper="$2"
    shift 2

    assertExecutable "$original"

    mkdir -p "$(dirname "$wrapper")"

    # @cc@ is substituted by the Nix expression with the target compiler
    # (plus sanitizer flags); the generated source is fed via stdin (-x c -).
    makeDocumentedCWrapper "$original" "$@" | \
        @cc@ \
        -Wall -Werror -Wpedantic \
        -Wno-overlength-strings \
        -Os \
        -x c \
        -o "$wrapper" -
}
|
||||
|
||||
# Syntax: wrapProgram <PROGRAM> <MAKE-WRAPPER FLAGS...>
# Moves PROGRAM aside to a dot-prefixed "-wrapped" name and installs a
# wrapper in its place; appends underscores until the hidden name is free.
wrapProgram() { wrapProgramBinary "$@"; }
wrapProgramBinary() {
    local target="$1"
    local hiddenPath

    assertExecutable "$target"

    hiddenPath="$(dirname "$target")/.$(basename "$target")"-wrapped
    until [ ! -e "$hiddenPath" ]; do
        hiddenPath="${hiddenPath}_"
    done
    mv "$target" "$hiddenPath"
    makeWrapper "$hiddenPath" "$target" --inherit-argv0 "${@:2}"
}
|
||||
|
||||
# Generate source code for the wrapper in such a way that the wrapper inputs
# will still be readable even after compilation
# makeDocumentedCWrapper EXECUTABLE ARGS
# ARGS: same as makeWrapper
makeDocumentedCWrapper() {
    local generatedSource embeddedDocs
    generatedSource=$(makeCWrapper "$@")
    embeddedDocs=$(docstring "$@")
    printf '%s\n\n' "$generatedSource"
    printf '%s\n' "$embeddedDocs"
}
|
||||
|
||||
# makeCWrapper EXECUTABLE ARGS
# ARGS: same as makeWrapper
# Emits the complete C source of the wrapper on stdout. Each flag is
# translated into C statement(s) appended to `main`; the uses_* markers
# track which headers and helper functions the generated code requires.
makeCWrapper() {
    local argv0 inherit_argv0 n params cmd main flagsBefore flags executable length
    local uses_prefix uses_suffix uses_assert uses_assert_success uses_stdio uses_asprintf
    executable=$(escapeStringLiteral "$1")
    params=("$@")
    length=${#params[*]}
    # Start at 1: params[0] is the executable itself.
    for ((n = 1; n < length; n += 1)); do
        p="${params[n]}"
        case $p in
            --set)
                cmd=$(setEnv "${params[n + 1]}" "${params[n + 2]}")
                main="$main$cmd"$'\n'
                n=$((n + 2))
                # Too few arguments: emit a #error so the compiler reports it.
                [ $n -ge "$length" ] && main="$main#error makeCWrapper: $p takes 2 arguments"$'\n'
            ;;
            --set-default)
                cmd=$(setDefaultEnv "${params[n + 1]}" "${params[n + 2]}")
                main="$main$cmd"$'\n'
                uses_stdio=1
                uses_assert_success=1
                n=$((n + 2))
                [ $n -ge "$length" ] && main="$main#error makeCWrapper: $p takes 2 arguments"$'\n'
            ;;
            --unset)
                cmd=$(unsetEnv "${params[n + 1]}")
                main="$main$cmd"$'\n'
                uses_stdio=1
                uses_assert_success=1
                n=$((n + 1))
                [ $n -ge "$length" ] && main="$main#error makeCWrapper: $p takes 1 argument"$'\n'
            ;;
            --prefix)
                cmd=$(setEnvPrefix "${params[n + 1]}" "${params[n + 2]}" "${params[n + 3]}")
                main="$main$cmd"$'\n'
                uses_prefix=1
                uses_asprintf=1
                uses_stdio=1
                uses_assert_success=1
                uses_assert=1
                n=$((n + 3))
                [ $n -ge "$length" ] && main="$main#error makeCWrapper: $p takes 3 arguments"$'\n'
            ;;
            --suffix)
                cmd=$(setEnvSuffix "${params[n + 1]}" "${params[n + 2]}" "${params[n + 3]}")
                main="$main$cmd"$'\n'
                uses_suffix=1
                uses_asprintf=1
                uses_stdio=1
                uses_assert_success=1
                uses_assert=1
                n=$((n + 3))
                [ $n -ge "$length" ] && main="$main#error makeCWrapper: $p takes 3 arguments"$'\n'
            ;;
            --chdir)
                cmd=$(changeDir "${params[n + 1]}")
                main="$main$cmd"$'\n'
                uses_stdio=1
                uses_assert_success=1
                n=$((n + 1))
                [ $n -ge "$length" ] && main="$main#error makeCWrapper: $p takes 1 argument"$'\n'
            ;;
            --add-flags)
                # Flags are collected and emitted once, before the execv.
                flags="${params[n + 1]}"
                flagsBefore="$flagsBefore $flags"
                uses_assert=1
                n=$((n + 1))
                [ $n -ge "$length" ] && main="$main#error makeCWrapper: $p takes 1 argument"$'\n'
            ;;
            --argv0)
                argv0=$(escapeStringLiteral "${params[n + 1]}")
                inherit_argv0=
                n=$((n + 1))
                [ $n -ge "$length" ] && main="$main#error makeCWrapper: $p takes 1 argument"$'\n'
            ;;
            --inherit-argv0)
                # Whichever comes last of --argv0 and --inherit-argv0 wins
                inherit_argv0=1
            ;;
            *) # Using an error macro, we will make sure the compiler gives an understandable error message
                main="$main#error makeCWrapper: Unknown argument ${p}"$'\n'
            ;;
        esac
    done
    # Deliberately unquoted: the accumulated flag string is word-split into
    # addFlags' arguments.
    # shellcheck disable=SC2086
    [ -z "$flagsBefore" ] || main="$main"${main:+$'\n'}$(addFlags $flagsBefore)$'\n'$'\n'
    [ -z "$inherit_argv0" ] && main="${main}argv[0] = \"${argv0:-${executable}}\";"$'\n'
    main="${main}return execv(\"${executable}\", argv);"$'\n'

    # Emit only the headers/helpers the generated statements need.
    [ -z "$uses_asprintf" ] || printf '%s\n' "#define _GNU_SOURCE /* See feature_test_macros(7) */"
    printf '%s\n' "#include <unistd.h>"
    printf '%s\n' "#include <stdlib.h>"
    [ -z "$uses_assert" ] || printf '%s\n' "#include <assert.h>"
    [ -z "$uses_stdio" ] || printf '%s\n' "#include <stdio.h>"
    [ -z "$uses_assert_success" ] || printf '\n%s\n' "#define assert_success(e) do { if ((e) < 0) { perror(#e); abort(); } } while (0)"
    [ -z "$uses_prefix" ] || printf '\n%s\n' "$(setEnvPrefixFn)"
    [ -z "$uses_suffix" ] || printf '\n%s\n' "$(setEnvSuffixFn)"
    printf '\n%s' "int main(int argc, char **argv) {"
    printf '\n%s' "$(indent4 "$main")"
    printf '\n%s\n' "}"
}
|
||||
|
||||
# addFlags FLAG...
# Emit C code that allocates a new argv with the given flags inserted
# after argv[0], copies the caller's arguments behind them, and swaps the
# new vector in before execv.
addFlags() {
    local assignments n flag extraFlags tmpVar
    tmpVar="argv_tmp"
    extraFlags=("$@")
    assignments=""
    for ((n = 0; n < ${#extraFlags[*]}; n += 1)); do
        flag=$(escapeStringLiteral "${extraFlags[$n]}")
        assignments+="${tmpVar}[$((n+1))] = \"$flag\";"$'\n'
    done
    printf '%s\n' "char **$tmpVar = calloc($((n+1)) + argc, sizeof(*$tmpVar));"
    printf '%s\n' "assert($tmpVar != NULL);"
    printf '%s\n' "${tmpVar}[0] = argv[0];"
    printf '%s' "$assignments"
    printf '%s\n' "for (int i = 1; i < argc; ++i) {"
    printf '%s\n' " ${tmpVar}[$n + i] = argv[i];"
    printf '%s\n' "}"
    printf '%s\n' "${tmpVar}[$n + argc] = NULL;"
    printf '%s\n' "argv = $tmpVar;"
}
|
||||
|
||||
# chdir DIR
# Emit a C statement that chdir()s to DIR, aborting on failure.
changeDir() {
    local escapedDir
    escapedDir=$(escapeStringLiteral "$1")
    printf '%s' "assert_success(chdir(\"$escapedDir\"));"
}
|
||||
|
||||
# prefix ENV SEP VAL
# Emit a call to the generated set_env_prefix() helper; also validates the
# (unescaped) variable name, emitting a #error on bad names.
setEnvPrefix() {
    local escEnv escSep escVal
    escEnv=$(escapeStringLiteral "$1")
    escSep=$(escapeStringLiteral "$2")
    escVal=$(escapeStringLiteral "$3")
    printf '%s' "set_env_prefix(\"$escEnv\", \"$escSep\", \"$escVal\");"
    assertValidEnvName "$1"
}
|
||||
|
||||
# suffix ENV SEP VAL
# Emit a call to the generated set_env_suffix() helper; also validates the
# (unescaped) variable name, emitting a #error on bad names.
setEnvSuffix() {
    local escEnv escSep escVal
    escEnv=$(escapeStringLiteral "$1")
    escSep=$(escapeStringLiteral "$2")
    escVal=$(escapeStringLiteral "$3")
    printf '%s' "set_env_suffix(\"$escEnv\", \"$escSep\", \"$escVal\");"
    assertValidEnvName "$1"
}
|
||||
|
||||
# setEnv KEY VALUE
# Emit C code that unconditionally sets KEY=VALUE via putenv().
setEnv() {
    local escKey escValue
    escKey=$(escapeStringLiteral "$1")
    escValue=$(escapeStringLiteral "$2")
    printf '%s' "putenv(\"$escKey=$escValue\");"
    assertValidEnvName "$1"
}
|
||||
|
||||
# setDefaultEnv KEY VALUE
# Emit C code that sets KEY=VALUE only if KEY is not already present
# (setenv with overwrite=0).
setDefaultEnv() {
    local escKey escValue
    escKey=$(escapeStringLiteral "$1")
    escValue=$(escapeStringLiteral "$2")
    printf '%s' "assert_success(setenv(\"$escKey\", \"$escValue\", 0));"
    assertValidEnvName "$1"
}
|
||||
|
||||
# unsetEnv KEY
# Emit C code that removes KEY from the environment.
unsetEnv() {
    local escKey
    escKey=$(escapeStringLiteral "$1")
    printf '%s' "assert_success(unsetenv(\"$escKey\"));"
    assertValidEnvName "$1"
}
|
||||
|
||||
# Makes it safe to insert STRING within quotes in a C String Literal.
# escapeStringLiteral STRING
# Backslashes are doubled; double quotes, newlines and carriage returns
# become their C escape sequences.
escapeStringLiteral() {
    local escaped=$1
    escaped=${escaped//$'\\'/$'\\\\'}
    escaped=${escaped//\"/'\"'}
    escaped=${escaped//$'\n'/"\n"}
    escaped=${escaped//$'\r'/"\r"}
    printf '%s' "$escaped"
}
|
||||
|
||||
# Indents every non-empty line by 4 spaces. To avoid trailing whitespace, we don't indent empty lines
# indent4 TEXT_BLOCK
indent4() {
    printf '%s' "$1" | awk '{ print ($0 == "" ? $0 : "    " $0) }'
}
|
||||
|
||||
# assertValidEnvName NAME
# Emit a C #error when NAME cannot be an environment variable name
# (contains '=' or is empty); emits nothing for valid names.
assertValidEnvName() {
    if [[ "$1" == *=* ]]; then
        printf '\n%s\n' "#error Illegal environment variable name \`$1\` (cannot contain \`=\`)"
    elif [[ -z "$1" ]]; then
        printf '\n%s\n' "#error Environment variable name can't be empty."
    fi
}
|
||||
|
||||
# Emit the C helper set_env_prefix(env, sep, prefix): prepends prefix+sep
# to an existing variable, or sets the variable to prefix outright.
setEnvPrefixFn() {
    printf '%s' "\
void set_env_prefix(char *env, char *sep, char *prefix) {
    char *existing = getenv(env);
    if (existing) {
        char *val;
        assert_success(asprintf(&val, \"%s%s%s\", prefix, sep, existing));
        assert_success(setenv(env, val, 1));
        free(val);
    } else {
        assert_success(setenv(env, prefix, 1));
    }
}
"
}
|
||||
|
||||
# Emit the C helper set_env_suffix(env, sep, suffix): appends sep+suffix
# to an existing variable, or sets the variable to suffix outright.
setEnvSuffixFn() {
    printf '%s' "\
void set_env_suffix(char *env, char *sep, char *suffix) {
    char *existing = getenv(env);
    if (existing) {
        char *val;
        assert_success(asprintf(&val, \"%s%s%s\", existing, sep, suffix));
        assert_success(setenv(env, val, 1));
        free(val);
    } else {
        assert_success(setenv(env, suffix, 1));
    }
}
"
}
|
||||
|
||||
# Embed a C string which shows up as readable text in the compiled binary wrapper,
# giving instructions for recreating the wrapper.
# Keep in sync with makeBinaryWrapper.extractCmd
# (extractCmd greps the binary for the text between "makeCWrapper" and the
# next blank line — the blank lines below are significant.)
docstring() {
    printf '%s' "const char * DOCSTRING = \"$(escapeStringLiteral "


# ------------------------------------------------------------------------------------
# The C-code for this binary wrapper has been generated using the following command:


makeCWrapper $(formatArgs "$@")


# (Use \`nix-shell -p makeBinaryWrapper\` to get access to makeCWrapper in your shell)
# ------------------------------------------------------------------------------------


")\";"
}
|
||||
|
||||
# formatArgs EXECUTABLE ARGS
# Render a makeWrapper invocation as a readable, shell-quoted command line
# (used for the docstring embedded in the binary). Flags taking the same
# number of arguments share a case arm — the previous version repeated an
# identical arm per flag.
formatArgs() {
    printf '%s' "${1@Q}"
    shift
    while [ $# -gt 0 ]; do
        case "$1" in
            --set|--set-default)
                formatArgsLine 2 "$@"
                shift 2
                ;;
            --unset|--chdir|--add-flags|--argv0)
                formatArgsLine 1 "$@"
                shift 1
                ;;
            --prefix|--suffix)
                formatArgsLine 3 "$@"
                shift 3
                ;;
            --inherit-argv0)
                formatArgsLine 0 "$@"
                ;;
        esac
        shift
    done
    # Terminating newline.
    printf '%s\n' ""
}
|
||||
|
||||
# formatArgsLine ARG_COUNT ARGS
# Print one continuation line: the flag in ARGS[0] followed by its next
# ARG_COUNT arguments, each shell-quoted via ${var@Q}.
formatArgsLine() {
    local argCount total
    argCount=$1
    total=$#
    shift
    printf '%s' $' \\\n    '"$1"
    shift
    while [ "$argCount" -gt $((total - $# - 2)) ]; do
        printf ' %s' "${1@Q}"
        shift
    done
}
|
||||
|
|
@ -0,0 +1,25 @@
|
|||
# Runs after the normal build phases: produces an lcov/genhtml HTML
# coverage report in $out/coverage and records metrics for Hydra.
postPhases+=" coverageReportPhase"

coverageReportPhase() {
    lcov --directory . --capture --output-file app.info
    # Temporarily disable globbing so the default "/nix/store/*" filter is
    # handed to lcov verbatim rather than expanded by the shell.
    set -o noglob
    lcov --remove app.info ${lcovFilter:-"/nix/store/*"} > app2.info
    set +o noglob
    mv app2.info app.info

    mkdir -p $out/coverage
    genhtml app.info $lcovExtraTraceFiles -o $out/coverage > log

    # Grab the overall coverage percentage so that Hydra can plot it over time.
    mkdir -p $out/nix-support
    lineCoverage="$(sed 's/.*lines\.*: \([0-9\.]\+\)%.*/\1/; t ; d' log)"
    functionCoverage="$(sed 's/.*functions\.*: \([0-9\.]\+\)%.*/\1/; t ; d' log)"
    if [ -z "$lineCoverage" -o -z "$functionCoverage" ]; then
        echo "failed to get coverage statistics"
        exit 1
    fi
    echo "lineCoverage $lineCoverage %" >> $out/nix-support/hydra-metrics
    echo "functionCoverage $functionCoverage %" >> $out/nix-support/hydra-metrics

    # Tell Hydra where the browsable report lives.
    echo "report coverage $out/coverage" >> $out/nix-support/hydra-build-products
}
|
||||
28
pkgs/build-support/setup-hooks/make-symlinks-relative.sh
Normal file
28
pkgs/build-support/setup-hooks/make-symlinks-relative.sh
Normal file
|
|
@ -0,0 +1,28 @@
|
|||
fixupOutputHooks+=(_makeSymlinksRelative)

# For every symlink in $output that refers to another file in $output
# ensure that the symlink is relative. This removes references to the output
# hash from the resulting store paths and thus the NAR files.
# Fixed: "$prefix" is now quoted in the find invocation, and the loop
# variable is declared local instead of leaking into the caller's scope.
_makeSymlinksRelative() {
    local f symlinkTarget

    # Packages can opt out by setting dontRewriteSymlinks.
    if [ -n "${dontRewriteSymlinks-}" ]; then
        return 0
    fi

    while IFS= read -r -d $'\0' f; do
        symlinkTarget=$(readlink "$f")
        if [[ "$symlinkTarget"/ != "$prefix"/* ]]; then
            # skip this symlink as it doesn't point to $prefix
            continue
        fi

        if [ ! -e "$symlinkTarget" ]; then
            echo "the symlink $f is broken, it points to $symlinkTarget (which is missing)"
        fi

        echo "rewriting symlink $f to be relative to $prefix"
        # GNU ln -r computes the relative path for us.
        ln -snrf "$symlinkTarget" "$f"

    done < <(find "$prefix" -type l -print0)
}
|
||||
215
pkgs/build-support/setup-hooks/make-wrapper.sh
Normal file
215
pkgs/build-support/setup-hooks/make-wrapper.sh
Normal file
|
|
@ -0,0 +1,215 @@
|
|||
# Assert that FILE exists and is executable
#
# assertExecutable FILE
#
# Calls die (provided by the stdenv environment) when the check fails.
assertExecutable() {
    local target="$1"
    if [[ ! -f "$target" || ! -x "$target" ]]; then
        die "Cannot wrap '$target' because it is not an executable file"
    fi
}
|
||||
|
||||
# construct an executable file that wraps the actual executable
# makeWrapper EXECUTABLE OUT_PATH ARGS

# ARGS:
# --argv0       NAME : set the name of the executed process to NAME
#                      (if unset or empty, defaults to EXECUTABLE)
# --inherit-argv0    : the executable inherits argv0 from the wrapper.
#                      (use instead of --argv0 '$0')
# --set         VAR VAL : add VAR with value VAL to the executable's environment
# --set-default VAR VAL : like --set, but only adds VAR if not already set in
#                         the environment
# --unset       VAR     : remove VAR from the environment
# --chdir       DIR     : change working directory (use instead of --run "cd DIR")
# --run         COMMAND : run command before the executable
# --add-flags   FLAGS   : add FLAGS to invocation of executable
# TODO(@ncfavier): --append-flags

# --prefix          ENV SEP VAL   : suffix/prefix ENV with VAL, separated by SEP
# --suffix
# --prefix-each     ENV SEP VALS  : like --prefix, but VALS is a list
# --suffix-each     ENV SEP VALS  : like --suffix, but VALS is a list
# --prefix-contents ENV SEP FILES : like --suffix-each, but contents of FILES
#                                   are read first and used as VALS
# --suffix-contents
makeWrapper() { makeShellWrapper "$@"; }
makeShellWrapper() {
    local original="$1"
    local wrapper="$2"
    local params varName value command separator n fileNames
    local argv0 flagsBefore flags

    assertExecutable "$original"

    # Write wrapper code which adds `value` to the beginning or end of
    # the list variable named by `varName`, depending on the `mode`
    # specified.
    #
    # A value which is already part of the list will not be added
    # again. If this is the case and the `suffix` mode is used, the
    # list won't be touched at all. The `prefix` mode will however
    # move the last matching instance of the value to the beginning
    # of the list. Any remaining duplicates of the value will be left
    # as-is.
    #
    # Note: this function only *emits* shell code into $wrapper; the
    # actual list manipulation happens when the wrapper runs.
    addValue() {
        local mode="$1"       # `prefix` or `suffix` to add to the beginning or end respectively
        local varName="$2"    # name of list variable to add to
        local separator="$3"  # character used to separate elements of list
        local value="$4"      # one value, or multiple values separated by `separator`, to add to list

        # Disable file globbing, since bash will otherwise try to find
        # filenames matching the the value to be prefixed/suffixed if
        # it contains characters considered wildcards, such as `?` and
        # `*`. We want the value as is, except we also want to split
        # it on on the separator; hence we can't quote it.
        local reenableGlob=0
        if [[ ! -o noglob ]]; then
            reenableGlob=1
        fi
        set -o noglob

        if [[ -n "$value" ]]; then
            local old_ifs=$IFS
            IFS=$separator

            if [[ "$mode" == '--prefix'* ]]; then
                # Keep the order of the components as written when
                # prefixing; normally, they would be added in the
                # reverse order.
                local tmp=
                for v in $value; do
                    tmp=$v${tmp:+$separator}$tmp
                done
                value="$tmp"
            fi
            for v in $value; do
                {
                    # ${x@Q} quotes the value for safe re-parsing by the
                    # wrapper's shell.
                    echo "$varName=\${$varName:+${separator@Q}\$$varName${separator@Q}}" # add separators on both ends unless empty
                    if [[ "$mode" == '--prefix'* ]]; then # -- in prefix mode --
                        echo "$varName=\${$varName/${separator@Q}${v@Q}${separator@Q}/${separator@Q}}" # remove the first instance of the value (if any)
                        echo "$varName=${v@Q}\$$varName" # prepend the value
                    elif [[ "$mode" == '--suffix'* ]]; then # -- in suffix mode --
                        echo "if [[ \$$varName != *${separator@Q}${v@Q}${separator@Q}* ]]; then" # if the value isn't already in the list
                        echo " $varName=\$$varName${v@Q}" # append the value
                        echo "fi"
                    else
                        echo "unknown mode $mode!" 1>&2
                        exit 1
                    fi
                    echo "$varName=\${$varName#${separator@Q}}" # remove leading separator
                    echo "$varName=\${$varName%${separator@Q}}" # remove trailing separator
                    echo "export $varName"
                } >> "$wrapper"
            done
            IFS=$old_ifs
        fi

        if (( reenableGlob )); then
            set +o noglob
        fi
    }

    mkdir -p "$(dirname "$wrapper")"

    # @shell@ is substituted with the store path of bash when this hook
    # is installed.
    echo "#! @shell@ -e" > "$wrapper"

    # Manual index-based scan so multi-argument options can consume their
    # operands by advancing n.
    params=("$@")
    for ((n = 2; n < ${#params[*]}; n += 1)); do
        p="${params[$n]}"

        if [[ "$p" == "--set" ]]; then
            varName="${params[$((n + 1))]}"
            value="${params[$((n + 2))]}"
            n=$((n + 2))
            echo "export $varName=${value@Q}" >> "$wrapper"
        elif [[ "$p" == "--set-default" ]]; then
            varName="${params[$((n + 1))]}"
            value="${params[$((n + 2))]}"
            n=$((n + 2))
            echo "export $varName=\${$varName-${value@Q}}" >> "$wrapper"
        elif [[ "$p" == "--unset" ]]; then
            varName="${params[$((n + 1))]}"
            n=$((n + 1))
            echo "unset $varName" >> "$wrapper"
        elif [[ "$p" == "--chdir" ]]; then
            dir="${params[$((n + 1))]}"
            n=$((n + 1))
            echo "cd ${dir@Q}" >> "$wrapper"
        elif [[ "$p" == "--run" ]]; then
            command="${params[$((n + 1))]}"
            n=$((n + 1))
            echo "$command" >> "$wrapper"
        elif [[ ("$p" == "--suffix") || ("$p" == "--prefix") ]]; then
            varName="${params[$((n + 1))]}"
            separator="${params[$((n + 2))]}"
            value="${params[$((n + 3))]}"
            n=$((n + 3))
            addValue "$p" "$varName" "$separator" "$value"
        elif [[ ("$p" == "--suffix-each") || ("$p" == "--prefix-each") ]]; then
            varName="${params[$((n + 1))]}"
            separator="${params[$((n + 2))]}"
            values="${params[$((n + 3))]}"
            n=$((n + 3))
            # Intentionally unquoted: VALS is whitespace-split into items.
            for value in $values; do
                addValue "$p" "$varName" "$separator" "$value"
            done
        elif [[ ("$p" == "--suffix-contents") || ("$p" == "--prefix-contents") ]]; then
            varName="${params[$((n + 1))]}"
            separator="${params[$((n + 2))]}"
            fileNames="${params[$((n + 3))]}"
            n=$((n + 3))
            for fileName in $fileNames; do
                contents="$(cat "$fileName")"
                addValue "$p" "$varName" "$separator" "$contents"
            done
        elif [[ "$p" == "--add-flags" ]]; then
            flags="${params[$((n + 1))]}"
            n=$((n + 1))
            flagsBefore="$flagsBefore $flags"
        elif [[ "$p" == "--argv0" ]]; then
            argv0="${params[$((n + 1))]}"
            n=$((n + 1))
        elif [[ "$p" == "--inherit-argv0" ]]; then
            # Whichever comes last of --argv0 and --inherit-argv0 wins
            argv0='$0'
        else
            die "makeWrapper doesn't understand the arg $p"
        fi
    done

    # Final line: exec the real binary, optionally overriding argv[0],
    # with any accumulated --add-flags before the caller's "$@".
    echo exec ${argv0:+-a \"$argv0\"} \""$original"\" \
         "$flagsBefore" '"$@"' >> "$wrapper"

    chmod +x "$wrapper"
}
|
||||
|
||||
# Print every NAME with SUFFIX appended, one per line.
#
# addSuffix SUFFIX NAME...
addSuffix() {
    suffix="$1"
    shift
    for name do
        printf '%s%s\n' "$name" "$suffix"
    done
}
|
||||
|
||||
# Print only those of the given paths that exist on disk, one per line.
#
# filterExisting PATH...
filterExisting() {
    for fn in "$@"; do
        if [[ -e "$fn" ]]; then
            printf '%s\n' "$fn"
        fi
    done
}
|
||||
|
||||
# Syntax: wrapProgram <PROGRAM> <MAKE-WRAPPER FLAGS...>
#
# Replaces PROGRAM with a generated wrapper script; the real executable
# is kept next to it as a hidden ".<name>-wrapped" file.
wrapProgram() { wrapProgramShell "$@"; }
wrapProgramShell() {
    local prog="$1"
    local hidden
    shift

    assertExecutable "$prog"

    # Pick a hidden name that is not already taken (wrapping the same
    # program twice keeps appending underscores).
    hidden="$(dirname "$prog")/.$(basename "$prog")-wrapped"
    while [[ -e "$hidden" ]]; do
        hidden+="_"
    done
    mv "$prog" "$hidden"
    makeWrapper "$hidden" "$prog" --inherit-argv0 "$@"
}
|
||||
23
pkgs/build-support/setup-hooks/move-docs.sh
Normal file
23
pkgs/build-support/setup-hooks/move-docs.sh
Normal file
|
|
@ -0,0 +1,23 @@
|
|||
# This setup hook moves $out/{man,doc,info} to $out/share; moves
# $out/share/man to $man/share/man; and moves $out/share/doc to
# $man/share/doc.

preFixupHooks+=(_moveToShare)

_moveToShare() {
    # Directories (relative to $out) that belong under share/; packages
    # can override the list via $forceShare.
    forceShare=${forceShare:=man doc info}
    # ${out-} so the guard also works when $out is unset (e.g. set -u).
    if [[ -z "${out-}" ]]; then return; fi

    for d in $forceShare; do
        if [ -d "$out/$d" ]; then
            if [ -d "$out/share/$d" ]; then
                # Both exist; moving would clobber, so just warn.
                echo "both $d/ and share/$d/ exist!"
            else
                echo "moving $out/$d to $out/share/$d"
                # Quote $out: store paths are caller-provided strings.
                mkdir -p "$out/share"
                mv "$out/$d" "$out/share/"
            fi
        fi
    done
}
|
||||
|
||||
22
pkgs/build-support/setup-hooks/move-lib64.sh
Normal file
22
pkgs/build-support/setup-hooks/move-lib64.sh
Normal file
|
|
@ -0,0 +1,22 @@
|
|||
# This setup hook, for each output, moves everything in $output/lib64
# to $output/lib, and replaces $output/lib64 with a symlink to
# $output/lib. The rationale is that lib64 directories are unnecessary
# in Nix (since 32-bit and 64-bit builds of a package are in different
# store paths anyway).
# If the move would overwrite anything, it should fail on rmdir.

fixupOutputHooks+=(_moveLib64)

_moveLib64() {
    if [ "${dontMoveLib64-}" = 1 ]; then return; fi
    # Nothing to do if lib64 is absent or already a symlink.
    # (|| instead of the deprecated, ambiguous `-o` test operator.)
    if [ ! -e "$prefix/lib64" ] || [ -L "$prefix/lib64" ]; then return; fi
    echo "moving $prefix/lib64/* to $prefix/lib"
    mkdir -p "$prefix/lib"
    shopt -s dotglob
    for i in "$prefix"/lib64/*; do
        # Guard against an empty lib64/, where the glob stays unexpanded
        # (also accept broken symlinks, which fail -e but pass -L).
        [ -e "$i" ] || [ -L "$i" ] || continue
        mv --no-clobber "$i" "$prefix/lib"
    done
    shopt -u dotglob
    # rmdir fails (aborting the fixup) if --no-clobber left anything behind.
    rmdir "$prefix/lib64"
    ln -s lib "$prefix/lib64"
}
|
||||
19
pkgs/build-support/setup-hooks/move-sbin.sh
Normal file
19
pkgs/build-support/setup-hooks/move-sbin.sh
Normal file
|
|
@ -0,0 +1,19 @@
|
|||
# This setup hook, for each output, moves everything in $output/sbin
# to $output/bin, and replaces $output/sbin with a symlink to
# $output/bin.

fixupOutputHooks+=(_moveSbin)

_moveSbin() {
    if [ "${dontMoveSbin-}" = 1 ]; then return; fi
    # Nothing to do if sbin is absent or already a symlink.
    # (|| instead of the deprecated, ambiguous `-o` test operator.)
    if [ ! -e "$prefix/sbin" ] || [ -L "$prefix/sbin" ]; then return; fi
    echo "moving $prefix/sbin/* to $prefix/bin"
    mkdir -p "$prefix/bin"
    shopt -s dotglob
    for i in "$prefix"/sbin/*; do
        # Guard against an empty sbin/, where the glob stays unexpanded
        # (also accept broken symlinks, which fail -e but pass -L).
        [ -e "$i" ] || [ -L "$i" ] || continue
        mv "$i" "$prefix/bin"
    done
    shopt -u dotglob
    rmdir "$prefix/sbin"
    ln -s bin "$prefix/sbin"
}
|
||||
25
pkgs/build-support/setup-hooks/move-systemd-user-units.sh
Executable file
25
pkgs/build-support/setup-hooks/move-systemd-user-units.sh
Executable file
|
|
@ -0,0 +1,25 @@
|
|||
#!/usr/bin/env bash

# This setup hook, for each output, moves everything in
# $output/lib/systemd/user to $output/share/systemd/user, and replaces
# $output/lib/systemd/user with a symlink to
# $output/share/systemd/user.

fixupOutputHooks+=(_moveSystemdUserUnits)

_moveSystemdUserUnits() {
    if [ "${dontMoveSystemdUserUnits:-0}" = 1 ]; then return; fi
    if [ ! -e "${prefix:?}/lib/systemd/user" ]; then return; fi
    local unitDir="$prefix/lib/systemd/user"
    local shareDir="$prefix/share/systemd/user"
    echo "moving $unitDir/* to $shareDir"
    mkdir -p "$shareDir"
    (
        # Subshell so dotglob doesn't leak into the rest of the build.
        shopt -s dotglob
        for unit in "$unitDir"/*; do
            mv "$unit" "$shareDir"
        done
    )
    rmdir "$unitDir"
    ln -s "$shareDir" "$unitDir"
}
|
||||
199
pkgs/build-support/setup-hooks/multiple-outputs.sh
Normal file
199
pkgs/build-support/setup-hooks/multiple-outputs.sh
Normal file
|
|
@ -0,0 +1,199 @@
|
|||
# The base package for automatic multiple-output splitting. Used in stdenv as well.
preConfigureHooks+=(_multioutConfig)
preFixupHooks+=(_multioutDocs)
preFixupHooks+=(_multioutDevs)
postFixupHooks+=(_multioutPropagateDev)

# Assign the first string naming a nonempty variable to the variable named $1.
# The *name* of the matching candidate (not its value) is what gets assigned.
_assignFirst() {
    local varName="$1"
    local REMOVE=REMOVE # slightly hacky - we allow REMOVE (i.e. not a variable name)
    shift
    while (( $# )); do
        if [ -n "${!1-}" ]; then
            # printf -v is a plain assignment, immune to the word-splitting
            # and code-injection hazards of the previous eval-based version.
            printf -v "$varName" '%s' "$1"
            return
        fi
        shift
    done
    echo "Error: _assignFirst found no valid variant!"
    return 1 # none found
}
|
||||
|
||||
# Same as _assignFirst, but only assigns when the target variable named
# by $1 is currently unset or empty.
_overrideFirst() {
    [ -n "${!1-}" ] || _assignFirst "$@"
}
|
||||
|
||||
|
||||
# Setup chains of sane default values with easy overridability.
# The variables are global to be usable anywhere during the build.
# Typical usage in package is defining outputBin = "dev";
# Each call picks the first candidate that names a nonempty variable.

_overrideFirst outputDev "dev" "out"
_overrideFirst outputBin "bin" "out"

_overrideFirst outputInclude "$outputDev"

# so-libs are often among the main things to keep, and so go to $out
_overrideFirst outputLib "lib" "out"

_overrideFirst outputDoc "doc" "out"
# REMOVE makes moveToOutput delete the files instead of relocating them.
_overrideFirst outputDevdoc "devdoc" REMOVE # documentation for developers
# man and info pages are small and often useful to distribute with binaries
_overrideFirst outputMan "man" "$outputBin"
_overrideFirst outputDevman "devman" "devdoc" "$outputMan"
_overrideFirst outputInfo "info" "$outputBin"
|
||||
|
||||
|
||||
# Add standard flags to put files into the desired outputs.
# Runs as a preConfigure hook; skipped for single-output packages or when
# setOutputFlags is explicitly emptied.
_multioutConfig() {
    if [ "$outputs" = "out" ] || [ -z "${setOutputFlags-1}" ]; then return; fi;

    # try to detect share/doc/${shareDocName}
    # Note: sadly, $configureScript detection comes later in configurePhase,
    # and reordering would cause more trouble than worth.
    if [ -z "$shareDocName" ]; then
        local confScript="$configureScript"
        if [ -z "$confScript" ] && [ -x ./configure ]; then
            confScript=./configure
        fi
        if [ -f "$confScript" ]; then
            # NOTE(review): `local` here scopes shareDocName to this function,
            # so the detected value is not visible after the hook returns —
            # confirm that is intended before relying on it elsewhere.
            local shareDocName="$(sed -n "s/^PACKAGE_TARNAME='\(.*\)'$/\1/p" < "$confScript")"
        fi
        # PACKAGE_TARNAME sometimes contains garbage.
        if [ -z "$shareDocName" ] || echo "$shareDocName" | grep -q '[^a-zA-Z0-9_-]'; then
            # Fall back to the package name with the version suffix stripped.
            shareDocName="$(echo "$name" | sed 's/-[^a-zA-Z].*//')"
        fi
    fi

    # ${!outputFoo} resolves the output *name* (e.g. "dev") to its store path.
    configureFlags="\
        --bindir=${!outputBin}/bin --sbindir=${!outputBin}/sbin \
        --includedir=${!outputInclude}/include --oldincludedir=${!outputInclude}/include \
        --mandir=${!outputMan}/share/man --infodir=${!outputInfo}/share/info \
        --docdir=${!outputDoc}/share/doc/${shareDocName} \
        --libdir=${!outputLib}/lib --libexecdir=${!outputLib}/libexec \
        --localedir=${!outputLib}/share/locale \
        $configureFlags"

    installFlags="\
        pkgconfigdir=${!outputDev}/lib/pkgconfig \
        m4datadir=${!outputDev}/share/aclocal aclocaldir=${!outputDev}/share/aclocal \
        $installFlags"
}
|
||||
|
||||
|
||||
# Add rpath prefixes to library paths, and avoid stdenv doing it for $out.
_addRpathPrefix "${!outputLib}"
NIX_NO_SELF_RPATH=1
|
||||
|
||||
|
||||
# Move subpaths that match pattern $1 from under any output/ to the $2 output/
# Beware: only globbing patterns are accepted, e.g.: * ? {foo,bar}
# A special target "REMOVE" is allowed: moveToOutput foo REMOVE
moveToOutput() {
    local patt="$1"
    local dstOut="$2"
    local output
    for output in $outputs; do
        # ${!output}: resolve the output name to its store path; skip
        # the destination itself.
        if [ "${!output}" = "$dstOut" ]; then continue; fi
        local srcPath
        # $patt intentionally unquoted so the glob expands.
        for srcPath in "${!output}"/$patt; do
            # apply to existing files/dirs, *including* broken symlinks
            if [ ! -e "$srcPath" ] && [ ! -L "$srcPath" ]; then continue; fi

            if [ "$dstOut" = REMOVE ]; then
                echo "Removing $srcPath"
                rm -r "$srcPath"
            else
                # Re-root srcPath under the destination output.
                local dstPath="$dstOut${srcPath#${!output}}"
                echo "Moving $srcPath to $dstPath"

                if [ -d "$dstPath" ] && [ -d "$srcPath" ]
                then # attempt directory merge
                    # check the case of trying to move an empty directory
                    rmdir "$srcPath" --ignore-fail-on-non-empty
                    if [ -d "$srcPath" ]; then
                        mv -t "$dstPath" "$srcPath"/*
                        rmdir "$srcPath"
                    fi
                else # usual move
                    mkdir -p "$(readlink -m "$dstPath/..")"
                    mv "$srcPath" "$dstPath"
                fi
            fi

            # remove empty directories, printing iff at least one gets removed
            local srcParent="$(readlink -m "$srcPath/..")"
            if rmdir "$srcParent"; then
                echo "Removing empty $srcParent/ and (possibly) its parents"
                rmdir -p --ignore-fail-on-non-empty "$(readlink -m "$srcParent/..")" \
                    2> /dev/null || true # doesn't ignore failure for some reason
            fi
        done
    done
}
|
||||
|
||||
# Move documentation to the desired outputs.
# Runs as a preFixup hook; relies on the output* defaults set above.
_multioutDocs() {
    local REMOVE=REMOVE # slightly hacky - we expand ${!outputFoo}

    moveToOutput share/info "${!outputInfo}"
    moveToOutput share/doc "${!outputDoc}"
    moveToOutput share/gtk-doc "${!outputDevdoc}"
    moveToOutput share/devhelp/books "${!outputDevdoc}"

    # the default outputMan is in $bin
    moveToOutput share/man "${!outputMan}"
    # man3 (library API pages) goes with developer docs instead.
    moveToOutput share/man/man3 "${!outputDevman}"
}
|
||||
|
||||
# Move development-only stuff to the desired outputs.
# Skipped for single-output packages or when moveToDev is emptied.
_multioutDevs() {
    if [ "$outputs" = "out" ] || [ -z "${moveToDev-1}" ]; then return; fi;
    moveToOutput include "${!outputInclude}"
    # these files are sometimes provided even without using the corresponding tool
    moveToOutput lib/pkgconfig "${!outputDev}"
    moveToOutput share/pkgconfig "${!outputDev}"
    moveToOutput lib/cmake "${!outputDev}"
    moveToOutput share/aclocal "${!outputDev}"
    # don't move *.la, as libtool needs them in the directory of the library

    # Point the pkg-config includedir at the (possibly different) include output.
    for f in "${!outputDev}"/{lib,share}/pkgconfig/*.pc; do
        echo "Patching '$f' includedir to output ${!outputInclude}"
        sed -i "/^includedir=/s,=\${prefix},=${!outputInclude}," "$f"
    done
}
|
||||
|
||||
# Make the "dev" propagate other outputs needed for development.
# Writes the propagated store paths into nix-support/propagated-build-inputs
# of the propagating output.
_multioutPropagateDev() {
    if [ "$outputs" = "out" ]; then return; fi;

    # First word of $outputs, used as a fallback propagator.
    local outputFirst
    for outputFirst in $outputs; do
        break
    done
    local propagaterOutput="$outputDev"
    if [ -z "$propagaterOutput" ]; then
        propagaterOutput="$outputFirst"
    fi

    # Default value: propagate binaries, includes and libraries
    if [ -z "${propagatedBuildOutputs+1}" ]; then
        local po_dirty="$outputBin $outputInclude $outputLib"
        # grep exits non-zero when everything is filtered out; don't let
        # pipefail abort the build over that.
        set +o pipefail
        propagatedBuildOutputs=`echo "$po_dirty" \
            | tr -s ' ' '\n' | grep -v -F "$propagaterOutput" \
            | sort -u | tr '\n' ' ' `
        set -o pipefail
    fi

    # The variable was explicitly set to empty or we resolved it so
    if [ -z "$propagatedBuildOutputs" ]; then
        return
    fi

    mkdir -p "${!propagaterOutput}"/nix-support
    for output in $propagatedBuildOutputs; do
        echo -n " ${!output}" >> "${!propagaterOutput}"/nix-support/propagated-build-inputs
    done
}
|
||||
119
pkgs/build-support/setup-hooks/patch-shebangs.sh
Normal file
119
pkgs/build-support/setup-hooks/patch-shebangs.sh
Normal file
|
|
@ -0,0 +1,119 @@
|
|||
# This setup hook causes the fixup phase to rewrite all script
# interpreter file names (`#! /path') to paths found in $PATH. E.g.,
# /bin/sh will be rewritten to /nix/store/<hash>-some-bash/bin/sh.
# /usr/bin/env gets special treatment so that ".../bin/env python" is
# rewritten to /nix/store/<hash>/bin/python. Interpreters that are
# already in the store are left untouched.
# A script file must be marked as executable, otherwise it will not be
# considered.

fixupOutputHooks+=(patchShebangsAuto)

# Run patch shebangs on a directory or file.
# Can take multiple paths as arguments.
# patchShebangs [--build | --host] PATH...

# Flags:
# --build : Lookup commands available at build-time
# --host  : Lookup commands available at runtime

# Example use cases,
# $ patchShebangs --host /nix/store/...-hello-1.0/bin
# $ patchShebangs --build configure

patchShebangs() {
    local pathName

    if [[ "$1" == "--host" ]]; then
        pathName=HOST_PATH
        shift
    elif [[ "$1" == "--build" ]]; then
        pathName=PATH
        shift
    fi

    echo "patching script interpreter paths in $@"
    local f
    local oldPath
    local newPath
    local arg0
    local args
    local oldInterpreterLine
    local newInterpreterLine

    if [[ $# -eq 0 ]]; then
        echo "No arguments supplied to patchShebangs" >&2
        return 0
    fi

    local f
    while IFS= read -r -d $'\0' f; do
        # isScript (from stdenv) checks for the "#!" magic.
        isScript "$f" || continue

        # First line of the script, e.g. "#!/usr/bin/env python -u".
        read -r oldInterpreterLine < "$f"
        # Drop the leading "#!" and split into interpreter, first arg, rest.
        read -r oldPath arg0 args <<< "${oldInterpreterLine:2}"

        # No explicit flag: store paths resolve against HOST_PATH under
        # strictDeps, everything else against the build-time PATH.
        if [[ -z "$pathName" ]]; then
            if [[ -n $strictDeps && $f == "$NIX_STORE"* ]]; then
                pathName=HOST_PATH
            else
                pathName=PATH
            fi
        fi

        if [[ "$oldPath" == *"/bin/env" ]]; then
            # Check for unsupported 'env' functionality:
            # - options: something starting with a '-'
            # - environment variables: foo=bar
            if [[ $arg0 == "-"* || $arg0 == *"="* ]]; then
                echo "$f: unsupported interpreter directive \"$oldInterpreterLine\" (set dontPatchShebangs=1 and handle shebang patching yourself)" >&2
                exit 1
            fi

            newPath="$(PATH="${!pathName}" command -v "$arg0" || true)"
        else
            if [[ -z $oldPath ]]; then
                # If no interpreter is specified linux will use /bin/sh. Set
                # oldpath="/bin/sh" so that we get /nix/store/.../sh.
                oldPath="/bin/sh"
            fi

            newPath="$(PATH="${!pathName}" command -v "$(basename "$oldPath")" || true)"

            args="$arg0 $args"
        fi

        # Strip trailing whitespace introduced when no arguments are present
        newInterpreterLine="$newPath $args"
        newInterpreterLine=${newInterpreterLine%${newInterpreterLine##*[![:space:]]}}

        # Leave interpreters that already point into the store untouched.
        if [[ -n "$oldPath" && "${oldPath:0:${#NIX_STORE}}" != "$NIX_STORE" ]]; then
            if [[ -n "$newPath" && "$newPath" != "$oldPath" ]]; then
                echo "$f: interpreter directive changed from \"$oldInterpreterLine\" to \"$newInterpreterLine\""
                # escape the escape chars so that sed doesn't interpret them
                escapedInterpreterLine=${newInterpreterLine//\\/\\\\}

                # Preserve times, see: https://github.com/NixOS/nixpkgs/pull/33281
                timestamp=$(stat --printf "%y" "$f")
                sed -i -e "1 s|.*|#\!$escapedInterpreterLine|" "$f"
                touch --date "$timestamp" "$f"
            fi
        fi
    done < <(find "$@" -type f -perm -0100 -print0)

    stopNest
}
|
||||
|
||||
patchShebangsAuto () {
    # Honour the opt-out flag and skip outputs that were never created.
    if [[ -n "${dontPatchShebangs-}" || ! -e "$prefix" ]]; then
        return
    fi

    # Dev output will end up being run on the build platform. An
    # example case of this is sdl2-config. Otherwise, we can just
    # use the runtime path (--host).
    local lookupMode=--host
    if [[ "$output" != out && "$output" = "$outputDev" ]]; then
        lookupMode=--build
    fi
    patchShebangs "$lookupMode" "$prefix"
}
|
||||
|
|
@ -0,0 +1,9 @@
|
|||
# Builds the postgresqlTestHook setup hook, which provisions a throwaway
# PostgreSQL server around checkPhase (see postgresql-test-hook.sh).
{ callPackage, makeSetupHook }:

(makeSetupHook {
  name = "postgresql-test-hook";
} ./postgresql-test-hook.sh).overrideAttrs (o: {
  # Exercised by CI via passthru.tests.
  passthru.tests = {
    simple = callPackage ./test.nix { };
  };
})
|
||||
|
|
@ -0,0 +1,79 @@
|
|||
# Provision an ephemeral PostgreSQL server around the check phase.
preCheckHooks+=('postgresqlStart')
postCheckHooks+=('postgresqlStop')


postgresqlStart() {

    # Add default environment variable values
    #
    # Client variables:
    # - https://www.postgresql.org/docs/current/libpq-envars.html
    #
    # Server variables:
    # - only PGDATA: https://www.postgresql.org/docs/current/creating-cluster.html

    if [[ "${PGDATA:-}" == "" ]]; then
        PGDATA="$NIX_BUILD_TOP/postgresql"
    fi
    export PGDATA

    # PGHOST pointing at a directory makes libpq use a Unix socket there.
    if [[ "${PGHOST:-}" == "" ]]; then
        mkdir -p "$NIX_BUILD_TOP/run/postgresql"
        PGHOST="$NIX_BUILD_TOP/run/postgresql"
    fi
    export PGHOST

    if [[ "${PGUSER:-}" == "" ]]; then
        PGUSER="test_user"
    fi
    export PGUSER

    if [[ "${PGDATABASE:-}" == "" ]]; then
        PGDATABASE="test_db"
    fi
    export PGDATABASE

    if [[ "${postgresqlTestUserOptions:-}" == "" ]]; then
        postgresqlTestUserOptions="LOGIN"
    fi

    # Default bootstrap SQL: create the test role and its database.
    if [[ "${postgresqlTestSetupSQL:-}" == "" ]]; then
        postgresqlTestSetupSQL="$(cat <<EOF
CREATE ROLE "$PGUSER" $postgresqlTestUserOptions;
CREATE DATABASE "$PGDATABASE" OWNER '$PGUSER';
EOF
)"
    fi

    if [[ "${postgresqlTestSetupCommands:-}" == "" ]]; then
        postgresqlTestSetupCommands='echo "$postgresqlTestSetupSQL" | PGUSER=postgres psql postgres'
    fi

    if ! type initdb >/dev/null; then
        echo >&2 'initdb not found. Did you add postgresql to the checkInputs?'
        false
    fi
    header 'initializing postgresql'
    initdb -U postgres

    # Move the socket
    echo "unix_socket_directories = '$NIX_BUILD_TOP/run/postgresql'" >>"$PGDATA/postgresql.conf"

    # TCP ports can be a problem in some sandboxes,
    # so we disable tcp listening by default
    if ! [[ "${postgresqlEnableTCP:-}" = 1 ]]; then
        echo "listen_addresses = ''" >>"$PGDATA/postgresql.conf"
    fi

    header 'starting postgresql'
    eval "${postgresqlStartCommands:-pg_ctl start}"

    header 'setting up postgresql'
    eval "$postgresqlTestSetupCommands"

}

# Tear down the server started by postgresqlStart.
postgresqlStop() {
    header 'stopping postgresql'
    pg_ctl stop
}
|
||||
27
pkgs/build-support/setup-hooks/postgresql-test-hook/test.nix
Normal file
27
pkgs/build-support/setup-hooks/postgresql-test-hook/test.nix
Normal file
|
|
@ -0,0 +1,27 @@
|
|||
# Minimal consumer of postgresqlTestHook: runs a round-trip SQL query
# against the hook-provisioned server during checkPhase.
{ postgresql, postgresqlTestHook, stdenv }:

stdenv.mkDerivation {
  name = "postgresql-test-hook-test";
  buildInputs = [ postgresqlTestHook ];
  checkInputs = [ postgresql ];
  dontUnpack = true;
  doCheck = true;
  # Write the SQL attribute to a file so checkPhase can feed it to psql.
  passAsFile = ["sql"];
  sql = ''
    CREATE TABLE hello (
      message text
    );
    INSERT INTO hello VALUES ('it '||'worked');
    SELECT * FROM hello;
  '';
  checkPhase = ''
    runHook preCheck
    psql <$sqlPath | grep 'it worked'
    TEST_RAN=1
    runHook postCheck
  '';
  # Fail the build if checkPhase was silently skipped.
  installPhase = ''
    [[ $TEST_RAN == 1 ]]
    touch $out
  '';
}
|
||||
22
pkgs/build-support/setup-hooks/prune-libtool-files.sh
Normal file
22
pkgs/build-support/setup-hooks/prune-libtool-files.sh
Normal file
|
|
@ -0,0 +1,22 @@
|
|||
# Clear dependency_libs in libtool files for shared libraries.

# Shared libraries already encode their dependencies with locations. .la
# files do not always encode those locations, and sometimes encode the
# locations in the wrong Nix output. .la files are not needed for shared
# libraries, but without dependency_libs they do not hurt either.

fixupOutputHooks+=(_pruneLibtoolFiles)

_pruneLibtoolFiles() {
    if [ "${dontPruneLibtoolFiles-}" ] || [ ! -e "$prefix" ]; then
        return
    fi

    # Libtool uses "dlname" and "library_names" fields for shared libraries and
    # the "old_library" field for static libraries. We are processing only
    # those .la files that do not describe static libraries.
    # The chained -exec greps act as filters: sed only runs on files where
    # both greps succeeded.
    find "$prefix" -type f -name '*.la' \
        -exec grep -q '^# Generated by .*libtool' {} \; \
        -exec grep -q "^old_library=''" {} \; \
        -exec sed -i {} -e "/^dependency_libs='[^']/ c dependency_libs='' #pruned" \;
}
|
||||
10
pkgs/build-support/setup-hooks/reproducible-builds.sh
Normal file
10
pkgs/build-support/setup-hooks/reproducible-builds.sh
Normal file
|
|
@ -0,0 +1,10 @@
|
|||
# Use the last part of the out path as hash input for the build.
# This should ensure that it is deterministic across rebuilds of the same
# derivation and not easily collide with other builds.
# We also truncate the hash so that it cannot cause reference cycles.
# (Computed inside the command substitution so no helper variables leak.)
NIX_CFLAGS_COMPILE="${NIX_CFLAGS_COMPILE:-} -frandom-seed=$(
    storeBase="${out##*/}"
    printf '%s' "${storeBase:0:10}"
)"
export NIX_CFLAGS_COMPILE
|
||||
71
pkgs/build-support/setup-hooks/role.bash
Normal file
71
pkgs/build-support/setup-hooks/role.bash
Normal file
|
|
@ -0,0 +1,71 @@
|
|||
# Since the same derivation can be depended on in multiple ways, we need to
# accumulate *each* role (i.e. host and target platforms relative to the
# depending derivation) in which the derivation is used.
#
# The role is intended to be used as part of other variable names like
# - $NIX_SOMETHING${role_post}
#
# Sets the global $role_post from a platform offset:
#   -1 -> '_FOR_BUILD', 0 -> '', 1 -> '_FOR_TARGET'.
# Returns 1 (with a diagnostic on stderr) for any other offset.
function getRole() {
    local offset="$1"
    if [[ "$offset" == '-1' ]]; then
        role_post='_FOR_BUILD'
    elif [[ "$offset" == '0' ]]; then
        role_post=''
    elif [[ "$offset" == '1' ]]; then
        role_post='_FOR_TARGET'
    else
        echo "@name@: used as improper sort of dependency" >&2
        return 1
    fi
}
|
||||
|
||||
# `hostOffset` describes how the host platform of the package is slid relative
# to the depending package. `targetOffset` likewise describes the target
# platform of the package. Both are brought into scope of the setup hook defined
# for dependency whose setup hook is being processed relative to the package
# being built.

# Set $role_post from the current package's host-platform offset.
function getHostRole() {
    getRole "$hostOffset"
}
# Set $role_post from the current package's target-platform offset.
function getTargetRole() {
    getRole "$targetOffset"
}

# `depHostOffset` describes how the host platform of the dependencies are slid
# relative to the depending package. `depTargetOffset` likewise describes the
# target platform of dependencies. Both are brought into scope of the
# environment hook defined for the dependency being applied relative to the
# package being built.

# Set $role_post from the dependency's host-platform offset.
function getHostRoleEnvHook() {
    getRole "$depHostOffset"
}
# Set $role_post from the dependency's target-platform offset.
function getTargetRoleEnvHook() {
    getRole "$depTargetOffset"
}
|
||||
|
||||
# This variant is intended specifically for code-producing tool wrapper scripts.
# `NIX_@wrapperName@_TARGET_*_@suffixSalt@` tracks this (needs to be an exported
# env var so can't use fancier data structures).
# Note: @wrapperName@/@suffixSalt@ are substituted when the wrapper is built.
function getTargetRoleWrapper() {
    case $targetOffset in
        -1)
            export NIX_@wrapperName@_TARGET_BUILD_@suffixSalt@=1
            ;;
        0)
            export NIX_@wrapperName@_TARGET_HOST_@suffixSalt@=1
            ;;
        1)
            export NIX_@wrapperName@_TARGET_TARGET_@suffixSalt@=1
            ;;
        *)
            echo "@name@: used as improper sort of dependency" >&2
            return 1
            ;;
    esac
}
|
||||
43
pkgs/build-support/setup-hooks/separate-debug-info.sh
Normal file
43
pkgs/build-support/setup-hooks/separate-debug-info.sh
Normal file
|
|
@ -0,0 +1,43 @@
|
|||
# Ask the compiler/linker wrappers to emit build IDs and debug info so the
# fixup hook below can split the debug sections into a separate output.
export NIX_SET_BUILD_ID=1
export NIX_LDFLAGS+=" --compress-debug-sections=zlib"
export NIX_CFLAGS_COMPILE+=" -ggdb -Wa,--compress-debug-sections"
# Generic stripping would discard exactly the sections we want to separate;
# _separateDebugInfo does its own --strip-debug instead.
dontStrip=1

fixupOutputHooks+=(_separateDebugInfo)
|
||||
|
||||
# Move debug info of every ELF file under $prefix into the `debug` output
# (falling back to $out), laid out as lib/debug/.build-id/xx/yyyy.debug so
# GDB can locate it by build ID.
_separateDebugInfo() {
    [ -e "$prefix" ] || return 0

    local dst="${debug:-$out}"
    # Nothing to do when the debug output IS the output being fixed up.
    if [ "$prefix" = "$dst" ]; then return 0; fi

    dst="$dst/lib/debug/.build-id"

    # Find executables and dynamic libraries.
    local i
    while IFS= read -r -d $'\0' i; do
        if ! isELF "$i"; then continue; fi

        # Extract the Build ID. FIXME: there's probably a cleaner way.
        local id="$($READELF -n "$i" | sed 's/.*Build ID: \([0-9a-f]*\).*/\1/; t; d')"
        # A SHA-1 build ID is 40 hex chars; anything else means none was found.
        if [ "${#id}" != 40 ]; then
            echo "could not find build ID of $i, skipping" >&2
            continue
        fi

        # Extract the debug info.
        header "separating debug info from $i (build ID $id)"
        mkdir -p "$dst/${id:0:2}"

        # This may fail, e.g. if the binary is for a different
        # architecture than we're building for. (This happens with
        # firmware blobs in QEMU.)
        (
            $OBJCOPY --only-keep-debug "$i" "$dst/${id:0:2}/${id:2}.debug"
            $STRIP --strip-debug "$i"

            # Also create a symlink <original-name>.debug.
            ln -sfn ".build-id/${id:0:2}/${id:2}.debug" "$dst/../$(basename "$i")"
        # On failure remove the (possibly now-empty) id directory again.
        ) || rmdir -p "$dst/${id:0:2}"
    done < <(find "$prefix" -type f -print0)
}
|
||||
13
pkgs/build-support/setup-hooks/set-java-classpath.sh
Normal file
13
pkgs/build-support/setup-hooks/set-java-classpath.sh
Normal file
|
|
@ -0,0 +1,13 @@
|
|||
# This setup hook adds every JAR in the share/java subdirectories of
|
||||
# the build inputs to $CLASSPATH.
|
||||
|
||||
# Make the accumulated class path visible to child processes (javac, java, …).
export CLASSPATH

# Append every JAR under $1/share/java to $CLASSPATH, ':'-separated.
# Arguments: $1 - store path of a dependency.
addPkgToClassPath () {
    local jar
    for jar in "$1"/share/java/*.jar; do
        # An unmatched glob expands to the literal pattern; skip it so a
        # dependency without JARs never pollutes the class path.
        [ -e "$jar" ] || continue
        # Join with ':' only when CLASSPATH is already non-empty. (The
        # previous version carried stray Nix-style ''${…} escapes, which
        # bash parsed as empty-string concatenations, and left the
        # expansions unquoted.)
        export CLASSPATH="${CLASSPATH-}${CLASSPATH:+:}${jar}"
    done
}
|
||||
|
||||
addEnvHooks "$targetOffset" addPkgToClassPath
|
||||
|
|
@ -0,0 +1,34 @@
|
|||
# Raise $SOURCE_DATE_EPOCH to the newest plausible file mtime under $1.
# Files newer than the build's start ($NIX_BUILD_TOP/..) are excluded as
# build artifacts. Relies on GNU find's -printf.
updateSourceDateEpoch() {
    local path="$1"

    # Get the last modification time of all regular files, sort them,
    # and get the most recent. Maybe we should use
    # https://github.com/0-wiz-0/findnewest here.
    local -a res=($(find "$path" -type f -not -newer "$NIX_BUILD_TOP/.." -printf '%T@ %p\0' \
                        | sort -n --zero-terminated | tail -n1 --zero-terminated | head -c -1))
    local time="${res[0]//\.[0-9]*/}" # remove the fraction part
    # NOTE(review): word-splitting into `res` means paths with whitespace
    # would land in res[2..]; only res[1] is reported below — confirm upstream.
    local newestFile="${res[1]}"

    # Update $SOURCE_DATE_EPOCH if the most recent file we found is newer.
    if [ "${time:-0}" -gt "$SOURCE_DATE_EPOCH" ]; then
        echo "setting SOURCE_DATE_EPOCH to timestamp $time of file $newestFile"
        export SOURCE_DATE_EPOCH="$time"

        # Warn if the new timestamp is too close to the present. This
        # may indicate that we were being applied to a file generated
        # during the build, or that an unpacker didn't restore
        # timestamps properly.
        local now="$(date +%s)"
        if [ "$time" -gt $((now - 60)) ]; then
            echo "warning: file $newestFile may be generated; SOURCE_DATE_EPOCH may be non-deterministic"
        fi
    fi
}
|
||||
|
||||
# After unpacking, derive SOURCE_DATE_EPOCH from the unpacked source tree.
postUnpackHooks+=(_updateSourceDateEpochFromSourceRoot)

# Delegate to updateSourceDateEpoch on $sourceRoot when the unpacker set one;
# a missing/empty $sourceRoot is a silent no-op.
_updateSourceDateEpochFromSourceRoot() {
    [ -z "$sourceRoot" ] || updateSourceDateEpoch "$sourceRoot"
}
|
||||
5
pkgs/build-support/setup-hooks/setup-debug-info-dirs.sh
Normal file
5
pkgs/build-support/setup-hooks/setup-debug-info-dirs.sh
Normal file
|
|
@ -0,0 +1,5 @@
|
|||
# Make each dependency's separated debug info (lib/debug) findable by
# debuggers via the NIX_DEBUG_INFO_DIRS search path.
setupDebugInfoDirs () {
    # NOTE(review): $1 is unquoted upstream; store paths contain no
    # whitespace, so this is safe in practice.
    addToSearchPath NIX_DEBUG_INFO_DIRS $1/lib/debug
}

addEnvHooks "$targetOffset" setupDebugInfoDirs
|
||||
88
pkgs/build-support/setup-hooks/shorten-perl-shebang.sh
Normal file
88
pkgs/build-support/setup-hooks/shorten-perl-shebang.sh
Normal file
|
|
@ -0,0 +1,88 @@
|
|||
# This setup hook modifies a Perl script so that any "-I" flags in its shebang
|
||||
# line are rewritten into a "use lib ..." statement on the next line. This gets
|
||||
# around a limitation in Darwin, which will not properly handle a script whose
|
||||
# shebang line exceeds 511 characters.
|
||||
#
|
||||
# Each occurrence of "-I /path/to/lib1" or "-I/path/to/lib2" is removed from
|
||||
# the shebang line, along with the single space that preceded it. These library
|
||||
# paths are placed into a new line of the form
|
||||
#
|
||||
# use lib "/path/to/lib1", "/path/to/lib2";
|
||||
#
|
||||
# immediately following the shebang line. If a library appeared in the original
|
||||
# list more than once, only its first occurrence will appear in the output
|
||||
# list. In other words, the libraries are deduplicated, but the ordering of the
|
||||
# first appearance of each one is preserved.
|
||||
#
|
||||
# Any flags other than "-I" in the shebang line are left as-is, and the
|
||||
# interpreter is also left alone (although the script will abort if the
|
||||
# interpreter does not seem to be either "perl" or else "env" with "perl" as
|
||||
# its argument). Each line after the shebang line is left unchanged. Each file
|
||||
# is modified in place.
|
||||
#
|
||||
# Usage:
|
||||
# shortenPerlShebang SCRIPT...
|
||||
|
||||
# Public entry point: rewrite the shebang line of every SCRIPT argument.
# Usage: shortenPerlShebang SCRIPT...
shortenPerlShebang() {
    local script
    for script in "$@"; do
        _shortenPerlShebang "$script"
    done
}
|
||||
|
||||
# Rewrite one Perl script in place: strip every "-I <path>" from the shebang
# line and emit an equivalent deduplicated `use lib "…", "…";` as line 2,
# so the shebang stays under Darwin's 512-character limit.
_shortenPerlShebang() {
    local program="$1"

    echo "shortenPerlShebang: rewriting shebang line in $program"

    if ! isScript "$program"; then
        die "shortenPerlShebang: refusing to modify $program because it is not a script"
    fi

    local temp="$(mktemp)"

    # gawk-specific: match() with a capture array and gensub().
    gawk '
        (NR == 1) {
            if (!($0 ~ /\/(perl|env +perl)\>/)) {
                print "shortenPerlShebang: script does not seem to be a Perl script" > "/dev/stderr"
                exit 1
            }
            # Collect each -I path (with or without a space) and delete it,
            # together with its leading space, from the shebang line.
            idx = 0
            while (match($0, / -I ?([^ ]+)/, pieces)) {
                matches[idx] = pieces[1]
                idx++
                $0 = gensub(/ -I ?[^ ]+/, "", 1, $0)
            }
            print $0
            if (idx > 0) {
                # Emit the paths, first occurrence only, as one `use lib` line.
                # NOTE(review): `for (idx in matches)` iteration order is not
                # guaranteed by POSIX awk; gawk visits numeric indices in
                # order in practice — confirm if ordering ever matters.
                prefix = "use lib "
                for (idx in matches) {
                    path = matches[idx]
                    if (!(path in seen)) {
                        printf "%s\"%s\"", prefix, path
                        seen[path] = 1
                        prefix = ", "
                    }
                }
                print ";"
            }
        }
        (NR > 1 ) {
            print
        }
    ' "$program" > "$temp" || die
    # Preserve the mode of the original file
    cp --preserve=mode --attributes-only "$program" "$temp"
    mv "$temp" "$program"

    # Measure the new shebang line length and make sure it's okay. We subtract
    # one to account for the trailing newline that "head" included in its
    # output.
    local new_length=$(( $(head -n 1 "$program" | wc -c) - 1 ))

    # Darwin is okay when the shebang line contains 511 characters, but not
    # when it contains 512 characters.
    if [ $new_length -ge 512 ]; then
        die "shortenPerlShebang: shebang line is $new_length characters--still too long for Darwin!"
    fi
}
|
||||
57
pkgs/build-support/setup-hooks/strip.sh
Normal file
57
pkgs/build-support/setup-hooks/strip.sh
Normal file
|
|
@ -0,0 +1,57 @@
|
|||
# This setup hook strips libraries and executables in the fixup phase.

# Registered per-output; _doStrip reads $prefix for the output being fixed up.
fixupOutputHooks+=(_doStrip)
|
||||
|
||||
# Strip the current output with the host-platform strip ($STRIP) and, when
# different, the target-platform strip ($TARGET_STRIP). Controlled by the
# dontStrip / dontStripHost / dontStripTarget flags and the
# stripDebugList/stripAllList directory lists.
_doStrip() {
    # We don't bother to strip build platform code because it shouldn't make it
    # to $out anyways---if it does, that's a bigger problem that a lack of
    # stripping will help catch.
    local -ra flags=(dontStripHost dontStripTarget)
    local -ra stripCmds=(STRIP TARGET_STRIP)

    # Optimization
    # (host and target strip are the same tool — doing both would be redundant)
    if [[ "${STRIP-}" == "${TARGET_STRIP-}" ]]; then
        dontStripTarget+=1
    fi

    local i
    for i in ${!stripCmds[@]}; do
        # Namerefs: `flag`/`stripCmd` alias the i-th flag variable and the
        # i-th strip-command variable (bash 4.3+).
        local -n flag="${flags[$i]}"
        local -n stripCmd="${stripCmds[$i]}"

        # `dontStrip` disables them all
        # (also skip when the strip command itself is not available)
        if [[ "${dontStrip-}" || "${flag-}" ]] || ! type -f "${stripCmd-}" 2>/dev/null
        then continue; fi

        stripDebugList=${stripDebugList:-lib lib32 lib64 libexec bin sbin}
        if [ -n "$stripDebugList" ]; then
            stripDirs "$stripCmd" "$stripDebugList" "${stripDebugFlags:--S}"
        fi

        stripAllList=${stripAllList:-}
        if [ -n "$stripAllList" ]; then
            stripDirs "$stripCmd" "$stripAllList" "${stripAllFlags:--s}"
        fi
    done
}
|
||||
|
||||
# Run strip command $1 with flags $3 on every file under the subdirectories
# named in the whitespace-separated list $2, resolved relative to $prefix.
# Missing directories are silently skipped.
stripDirs() {
    local cmd="$1"
    local dirs="$2"
    local stripFlags="$3"
    local dirsNew=

    # Keep only the directories that actually exist in this output.
    local d
    for d in ${dirs}; do
        if [ -d "$prefix/$d" ]; then
            dirsNew="${dirsNew} $prefix/$d "
        fi
    done
    dirs=${dirsNew}

    if [ -n "${dirs}" ]; then
        header "stripping (with command $cmd and flags $stripFlags) in$dirs"
        # $dirs/$cmd/$stripFlags are intentionally unquoted: they are
        # whitespace-separated lists. Strip errors (e.g. non-ELF files)
        # are deliberately ignored.
        find $dirs -type f -exec $cmd $stripFlags '{}' \; 2>/dev/null
        stopNest
    fi
}
|
||||
|
|
@ -0,0 +1,12 @@
|
|||
# Before configuring, replace any bundled config.sub/config.guess with the
# newer copies from @gnu_config@ (substituted at hook-generation time), so
# configure recognizes recent platforms. Opt out with
# dontUpdateAutotoolsGnuConfigScripts.
preConfigurePhases+=" updateAutotoolsGnuConfigScriptsPhase"

updateAutotoolsGnuConfigScriptsPhase() {
    if [ -n "${dontUpdateAutotoolsGnuConfigScripts-}" ]; then return; fi

    for script in config.sub config.guess; do
        # NOTE(review): $(find …) word-splits; fine for these fixed names
        # unless a source tree nests them under a path with spaces.
        for f in $(find . -type f -name "$script"); do
            echo "Updating Autotools / GNU config script to a newer upstream version: $f"
            cp -f "@gnu_config@/$script" "$f"
        done
    done
}
|
||||
1
pkgs/build-support/setup-hooks/use-old-cxx-abi.sh
Normal file
1
pkgs/build-support/setup-hooks/use-old-cxx-abi.sh
Normal file
|
|
@ -0,0 +1 @@
|
|||
export NIX_CFLAGS_COMPILE+=" -D_GLIBCXX_USE_CXX11_ABI=0"
|
||||
18
pkgs/build-support/setup-hooks/validate-pkg-config.sh
Normal file
18
pkgs/build-support/setup-hooks/validate-pkg-config.sh
Normal file
|
|
@ -0,0 +1,18 @@
|
|||
# This setup hook validates each pkgconfig file in each output.

fixupOutputHooks+=(_validatePkgConfig)

# Run `$PKG_CONFIG --validate` on every *.pc under $prefix; abort the build
# after checking all of them if any failed.
_validatePkgConfig() {
    local bail=0
    local pc
    # NUL-delimited find so .pc paths containing whitespace survive
    # (the previous `for pc in $(find …)` word-split them).
    while IFS= read -r -d '' pc; do
        # Do not fail immediately. It's nice to see all errors when
        # there are multiple pkgconfig files.
        if ! $PKG_CONFIG --validate "$pc"; then
            bail=1
        fi
    done < <(find "$prefix" -name '*.pc' -print0)

    if [ "$bail" -eq 1 ]; then
        exit 1
    fi
}
|
||||
45
pkgs/build-support/setup-hooks/win-dll-link.sh
Normal file
45
pkgs/build-support/setup-hooks/win-dll-link.sh
Normal file
|
|
@ -0,0 +1,45 @@
|
|||
|
||||
fixupOutputHooks+=(_linkDLLs)

# For every *.{exe,dll} in $output/bin/ we try to find all (potential)
# transitive dependencies and symlink those DLLs into $output/bin
# so they are found on invocation.
# (DLLs are first searched in the directory of the running exe file.)
# The links are relative, so relocating whole /nix/store won't break them.
# Windows/Cygwin-only; runs in a subshell so the `cd` does not leak.
_linkDLLs() {
(
    if [ ! -d "$prefix/bin" ]; then exit; fi
    cd "$prefix/bin"

    # Compose path list where DLLs should be located:
    # prefix $PATH by currently-built outputs
    local DLLPATH=""
    local outName
    for outName in $outputs; do
        addToSearchPath DLLPATH "${!outName}/bin"
    done
    DLLPATH="$DLLPATH:$PATH"

    echo DLLPATH="'$DLLPATH'"

    linkCount=0
    # Iterate over any DLL that we depend on.
    local dll
    for dll in $($OBJDUMP -p *.{exe,dll} | sed -n 's/.*DLL Name: \(.*\)/\1/p' | sort -u); do
        if [ -e "./$dll" ]; then continue; fi
        # Locate the DLL - it should be an *executable* file on $DLLPATH.
        local dllPath="$(PATH="$DLLPATH" type -P "$dll")"
        if [ -z "$dllPath" ]; then continue; fi
        # That DLL might have its own (transitive) dependencies,
        # so add also all DLLs from its directory to be sure.
        local dllPath2
        for dllPath2 in "$dllPath" "$(dirname $(readlink "$dllPath" || echo "$dllPath"))"/*.dll; do
            if [ -e ./"$(basename "$dllPath2")" ]; then continue; fi
            # Ask Cygwin for real (native) symlinks; fail loudly otherwise.
            CYGWIN+=\ winsymlinks:nativestrict ln -sr "$dllPath2" .
            linkCount=$(($linkCount+1))
        done
    done
    echo "Created $linkCount DLL link(s) in $prefix/bin"
)
}
|
||||
|
||||
177
pkgs/build-support/setup-hooks/wrap-gapps-hook/default.nix
Normal file
177
pkgs/build-support/setup-hooks/wrap-gapps-hook/default.nix
Normal file
|
|
@ -0,0 +1,177 @@
|
|||
# Builds the wrapGAppsHook setup hook from ./wrap-gapps-hook.sh and attaches
# an evaluation-time test suite via passthru.tests.
{ stdenv
, lib
, makeSetupHook
, makeWrapper
, gobject-introspection
, isGraphical ? true
, gtk3
, librsvg
, dconf
, callPackage
, wrapGAppsHook
, writeTextFile
}:

makeSetupHook {
  deps = lib.optionals (!stdenv.isDarwin) [
    # It is highly probable that a program will use GSettings,
    # at minimum through GTK file chooser dialogue.
    # Let’s add a GIO module for “dconf” GSettings backend
    # to avoid falling back to “memory” backend. This is
    # required for GSettings-based settings to be persisted.
    # Unfortunately, it also requires the user to have dconf
    # D-Bus service enabled globally (e.g. through a NixOS module).
    dconf.lib
  ] ++ lib.optionals isGraphical [
    # TODO: remove this, packages should depend on GTK explicitly.
    gtk3

    # librsvg provides a module for gdk-pixbuf to allow rendering
    # SVG icons. Most icon themes are SVG-based and so are some
    # graphics in GTK (e.g. cross for closing window in window title bar)
    # so it is pretty much required for applications using GTK.
    librsvg
  ] ++ [

    # We use the wrapProgram function.
    makeWrapper
  ];
  substitutions = {
    # Tests are carried on the hook derivation as passthru.tests; each one
    # builds the tiny ./tests/sample-project with the hook applied.
    passthru.tests = let
      sample-project = ./tests/sample-project;

      testLib = callPackage ./tests/lib.nix { };
      inherit (testLib) expectSomeLineContainingYInFileXToMentionZ;
    in rec {
      # Simple derivation containing a program and a daemon.
      basic = stdenv.mkDerivation {
        name = "basic";

        src = sample-project;

        nativeBuildInputs = [ wrapGAppsHook ];

        installFlags = [ "bin-foo" "libexec-bar" ];
      };

      # The wrapper for executable files should add path to dconf GIO module.
      basic-contains-dconf = let
        tested = basic;
      in testLib.runTest "basic-contains-dconf" (
        testLib.skip stdenv.isDarwin ''
          ${expectSomeLineContainingYInFileXToMentionZ "${tested}/bin/foo" "GIO_EXTRA_MODULES" "${dconf.lib}/lib/gio/modules"}
          ${expectSomeLineContainingYInFileXToMentionZ "${tested}/libexec/bar" "GIO_EXTRA_MODULES" "${dconf.lib}/lib/gio/modules"}
        ''
      );

      # Simple derivation containing a gobject-introspection typelib.
      typelib-Mahjong = stdenv.mkDerivation {
        name = "typelib-Mahjong";

        src = sample-project;

        installFlags = [ "typelib-Mahjong" ];
      };

      # Simple derivation using a typelib.
      typelib-user = stdenv.mkDerivation {
        name = "typelib-user";

        src = sample-project;

        nativeBuildInputs = [
          gobject-introspection
          wrapGAppsHook
        ];

        buildInputs = [
          typelib-Mahjong
        ];

        installFlags = [ "bin-foo" "libexec-bar" ];
      };

      # Testing cooperation with gobject-introspection setup hook,
      # which should populate GI_TYPELIB_PATH variable with paths
      # to typelibs among the derivation’s dependencies.
      # The resulting GI_TYPELIB_PATH should be picked up by the wrapper.
      typelib-user-has-gi-typelib-path = let
        tested = typelib-user;
      in testLib.runTest "typelib-user-has-gi-typelib-path" ''
        ${expectSomeLineContainingYInFileXToMentionZ "${tested}/bin/foo" "GI_TYPELIB_PATH" "${typelib-Mahjong}/lib/girepository-1.0"}
        ${expectSomeLineContainingYInFileXToMentionZ "${tested}/libexec/bar" "GI_TYPELIB_PATH" "${typelib-Mahjong}/lib/girepository-1.0"}
      '';

      # Simple derivation containing a gobject-introspection typelib in lib output.
      typelib-Bechamel = stdenv.mkDerivation {
        name = "typelib-Bechamel";

        outputs = [ "out" "lib" ];

        src = sample-project;

        makeFlags = [
          "LIBDIR=${placeholder "lib"}/lib"
        ];

        installFlags = [ "typelib-Bechamel" ];
      };

      # Simple derivation using a typelib from non-default output.
      typelib-multiout-user = stdenv.mkDerivation {
        name = "typelib-multiout-user";

        src = sample-project;

        nativeBuildInputs = [
          gobject-introspection
          wrapGAppsHook
        ];

        buildInputs = [
          typelib-Bechamel
        ];

        installFlags = [ "bin-foo" "libexec-bar" ];
      };

      # Testing cooperation with gobject-introspection setup hook,
      # which should populate GI_TYPELIB_PATH variable with paths
      # to typelibs among the derivation’s dependencies,
      # even when they are not in default output.
      # The resulting GI_TYPELIB_PATH should be picked up by the wrapper.
      typelib-multiout-user-has-gi-typelib-path = let
        tested = typelib-multiout-user;
      in testLib.runTest "typelib-multiout-user-has-gi-typelib-path" ''
        ${expectSomeLineContainingYInFileXToMentionZ "${tested}/bin/foo" "GI_TYPELIB_PATH" "${typelib-Bechamel.lib}/lib/girepository-1.0"}
        ${expectSomeLineContainingYInFileXToMentionZ "${tested}/libexec/bar" "GI_TYPELIB_PATH" "${typelib-Bechamel.lib}/lib/girepository-1.0"}
      '';

      # Simple derivation that contains a typelib as well as a program using it.
      typelib-self-user = stdenv.mkDerivation {
        name = "typelib-self-user";

        src = sample-project;

        nativeBuildInputs = [
          gobject-introspection
          wrapGAppsHook
        ];

        installFlags = [ "typelib-Cow" "bin-foo" "libexec-bar" ];
      };

      # Testing cooperation with gobject-introspection setup hook,
      # which should add the path to derivation’s own typelibs
      # to GI_TYPELIB_PATH variable.
      # The resulting GI_TYPELIB_PATH should be picked up by the wrapper.
      # https://github.com/NixOS/nixpkgs/issues/85515
      typelib-self-user-has-gi-typelib-path = let
        tested = typelib-self-user;
      in testLib.runTest "typelib-self-user-has-gi-typelib-path" ''
        ${expectSomeLineContainingYInFileXToMentionZ "${tested}/bin/foo" "GI_TYPELIB_PATH" "${typelib-self-user}/lib/girepository-1.0"}
        ${expectSomeLineContainingYInFileXToMentionZ "${tested}/libexec/bar" "GI_TYPELIB_PATH" "${typelib-self-user}/lib/girepository-1.0"}
      '';
    };
  };
} ./wrap-gapps-hook.sh
|
||||
31
pkgs/build-support/setup-hooks/wrap-gapps-hook/tests/lib.nix
Normal file
31
pkgs/build-support/setup-hooks/wrap-gapps-hook/tests/lib.nix
Normal file
|
|
@ -0,0 +1,31 @@
|
|||
# Shared helpers for the wrap-gapps-hook test suite.
{ lib, runCommand }:

rec {
  # Run `body` as a shell snippet in a derivation named `name`;
  # the test passes iff the snippet exits successfully.
  runTest = name: body: runCommand name { } ''
    set -o errexit
    ${body}
    touch $out
  '';

  # When `cond` holds, replace the test body `text` with a snippet that
  # merely reports the skip (and therefore always passes).
  skip = cond: text:
    if cond then ''
      echo "Skipping test $name" > /dev/stderr
    '' else text;

  # Shell snippet that reports a failure message and aborts the test.
  fail = text: ''
    echo "FAIL: $name: ${text}" > /dev/stderr
    exit 1
  '';

  # Assert that `file` has a line matching `filter`, and that at least one
  # such line also mentions `expected`.
  expectSomeLineContainingYInFileXToMentionZ = file: filter: expected: ''
    file=${lib.escapeShellArg file} filter=${lib.escapeShellArg filter} expected=${lib.escapeShellArg expected}

    if ! grep --text --quiet "$filter" "$file"; then
      ${fail "The file “$file” should include a line containing “$filter”."}
    fi

    if ! grep --text "$filter" "$file" | grep --text --quiet "$expected"; then
      ${fail "The file “$file” should include a line containing “$filter” that also contains “$expected”."}
    fi
  '';
}
|
||||
|
|
@ -0,0 +1,30 @@
|
|||
# Minimal fake project used by the wrap-gapps-hook tests: pattern rules
# create empty executables/typelibs on demand via installFlags.
PREFIX = $(out)
BINDIR = $(PREFIX)/bin
LIBEXECDIR = $(PREFIX)/libexec
LIBDIR = $(PREFIX)/lib
TYPELIBDIR = $(LIBDIR)/girepository-1.0

all:
	echo "Compiling…"
install:
	echo "Installing…"

bin:
	mkdir -p $(BINDIR)
# Adds `bin-${foo}` targets, that install `${foo}` executable to `$(BINDIR)`.
bin-%: bin
	touch $(BINDIR)/$(@:bin-%=%)
	chmod +x $(BINDIR)/$(@:bin-%=%)

libexec:
	mkdir -p $(LIBEXECDIR)
# Adds `libexec-${foo}` targets, that install `${foo}` executable to `$(LIBEXECDIR)`.
libexec-%: libexec
	touch $(LIBEXECDIR)/$(@:libexec-%=%)
	chmod +x $(LIBEXECDIR)/$(@:libexec-%=%)

typelib:
	mkdir -p $(TYPELIBDIR)
# Adds `typelib-${foo}` targets, that install `${foo}-1.0.typelib` file to `$(TYPELIBDIR)`.
typelib-%: typelib
	touch $(TYPELIBDIR)/$(@:typelib-%=%)-1.0.typelib
|
|
@ -0,0 +1,89 @@
|
|||
# shellcheck shell=bash
# Wrapper arguments accumulated by the hooks below; consumed by wrapGApp.
gappsWrapperArgs=()
|
||||
|
||||
# If dependency $1 ships a non-empty lib/gio/modules directory, add it to
# the wrapper's GIO_EXTRA_MODULES search path.
find_gio_modules() {
    local moduleDir="$1/lib/gio/modules"
    [ -d "$moduleDir" ] || return 0
    # An existing but empty directory is not worth wrapping for.
    [ -n "$(ls -A "$moduleDir")" ] || return 0
    gappsWrapperArgs+=(--prefix GIO_EXTRA_MODULES : "$moduleDir")
}
|
||||
|
||||
addEnvHooks "${targetOffset:?}" find_gio_modules
|
||||
|
||||
# Just before fixup, collect the environment the wrapped programs will need
# (pixbuf loaders, GSettings schemas, GIO modules, typelib/plugin paths)
# into gappsWrapperArgs.
gappsWrapperArgsHook() {
    if [ -n "$GDK_PIXBUF_MODULE_FILE" ]; then
        gappsWrapperArgs+=(--set GDK_PIXBUF_MODULE_FILE "$GDK_PIXBUF_MODULE_FILE")
    fi

    if [ -n "$GSETTINGS_SCHEMAS_PATH" ]; then
        gappsWrapperArgs+=(--prefix XDG_DATA_DIRS : "$GSETTINGS_SCHEMAS_PATH")
    fi

    # Check for prefix as well
    if [ -d "${prefix:?}/share" ]; then
        gappsWrapperArgs+=(--prefix XDG_DATA_DIRS : "$prefix/share")
    fi

    if [ -d "$prefix/lib/gio/modules" ] && [ -n "$(ls -A "$prefix/lib/gio/modules")" ]; then
        gappsWrapperArgs+=(--prefix GIO_EXTRA_MODULES : "$prefix/lib/gio/modules")
    fi

    # Forward well-known path variables (plus any extras the package lists
    # in wrapPrefixVariables) into the wrapper when they are set.
    for v in ${wrapPrefixVariables:-} GST_PLUGIN_SYSTEM_PATH_1_0 GI_TYPELIB_PATH GRL_PLUGIN_PATH; do
        if [ -n "${!v}" ]; then
            gappsWrapperArgs+=(--prefix "$v" : "${!v}")
        fi
    done
}

preFixupPhases+=" gappsWrapperArgsHook"
|
||||
|
||||
# Wrap a single program with the collected gappsWrapperArgs; any extra
# arguments are forwarded to wrapProgram after them.
wrapGApp() {
    local target=$1
    shift
    wrapProgram "$target" "${gappsWrapperArgs[@]}" "$@"
}
|
||||
|
||||
# Note: $gappsWrapperArgs still gets defined even if ${dontWrapGApps-} is set.
# Fixup hook: wrap every executable (and out-of-tree executable symlink)
# under $prefix/bin and $prefix/libexec with gappsWrapperArgs.
wrapGAppsHook() {
    # guard against running multiple times (e.g. due to propagation)
    [ -z "$wrapGAppsHookHasRun" ] || return 0
    wrapGAppsHookHasRun=1

    if [[ -z "${dontWrapGApps:-}" ]]; then
        targetDirsThatExist=()
        targetDirsRealPath=()

        # wrap binaries
        targetDirs=("${prefix}/bin" "${prefix}/libexec")
        for targetDir in "${targetDirs[@]}"; do
            if [[ -d "${targetDir}" ]]; then
                targetDirsThatExist+=("${targetDir}")
                targetDirsRealPath+=("$(realpath "${targetDir}")/")
                # NUL-delimited so filenames with whitespace survive; the
                # while body runs in a pipeline subshell, which is fine as
                # wrapping only has filesystem side effects.
                find "${targetDir}" -type f -executable -print0 |
                    while IFS= read -r -d '' file; do
                        echo "Wrapping program '${file}'"
                        wrapGApp "${file}"
                    done
            fi
        done

        # wrap links to binaries that point outside targetDirs
        # Note: links to binaries within targetDirs do not need
        # to be wrapped as the binaries have already been wrapped
        if [[ ${#targetDirsThatExist[@]} -ne 0 ]]; then
            # -type l -xtype f: symlinks whose final target is a regular file.
            find "${targetDirsThatExist[@]}" -type l -xtype f -executable -print0 |
                while IFS= read -r -d '' linkPath; do
                    linkPathReal=$(realpath "${linkPath}")
                    for targetPath in "${targetDirsRealPath[@]}"; do
                        if [[ "$linkPathReal" == "$targetPath"* ]]; then
                            echo "Not wrapping link: '$linkPath' (already wrapped)"
                            # Skip to the next link.
                            continue 2
                        fi
                    done
                    echo "Wrapping link: '$linkPath'"
                    wrapGApp "${linkPath}"
                done
        fi
    fi
}

fixupOutputHooks+=(wrapGAppsHook)
|
||||
Loading…
Add table
Add a link
Reference in a new issue