diff --git a/default.nix b/default.nix
new file mode 100644
index 000000000..b22e926ea
--- /dev/null
+++ b/default.nix
@@ -0,0 +1,3 @@
+(import (fetchTarball https://github.com/edolstra/flake-compat/archive/master.tar.gz) {
+ src = builtins.fetchGit ./.;
+}).defaultNix
diff --git a/flake.lock b/flake.lock
new file mode 100644
index 000000000..b448bcae4
--- /dev/null
+++ b/flake.lock
@@ -0,0 +1,65 @@
+{
+ "nodes": {
+ "nix": {
+ "info": {
+ "lastModified": 1585573619,
+ "narHash": "sha256-QbAxdnDkVvSVgkCNRbt3fUPUSNbeq9y3u7Vek/7Ib44="
+ },
+ "inputs": {
+ "nixpkgs": "nixpkgs"
+ },
+ "locked": {
+ "owner": "NixOS",
+ "repo": "nix",
+ "rev": "3e7aab81ce6787e01ea4ced5af1fc6a84e523762",
+ "type": "github"
+ },
+ "original": {
+ "id": "nix",
+ "type": "indirect"
+ }
+ },
+ "nixpkgs": {
+ "info": {
+ "lastModified": 1585405475,
+ "narHash": "sha256-bESW0n4KgPmZ0luxvwJ+UyATrC6iIltVCsGdLiphVeE="
+ },
+ "locked": {
+ "owner": "NixOS",
+ "repo": "nixpkgs",
+ "rev": "b88ff468e9850410070d4e0ccd68c7011f15b2be",
+ "type": "github"
+ },
+ "original": {
+ "id": "nixpkgs",
+ "ref": "nixos-20.03-small",
+ "type": "indirect"
+ }
+ },
+ "nixpkgs_2": {
+ "info": {
+ "lastModified": 1585388205,
+ "narHash": "sha256-lOXYmCE6FSikoOHr1HFIkNLnA0qdYhe8CxB8rE6+SnE="
+ },
+ "locked": {
+ "owner": "NixOS",
+ "repo": "nixpkgs",
+ "rev": "b0c285807d6a9f1b7562ec417c24fa1a30ecc31a",
+ "type": "github"
+ },
+ "original": {
+ "id": "nixpkgs",
+ "ref": "nixos-20.03",
+ "type": "indirect"
+ }
+ },
+ "root": {
+ "inputs": {
+ "nix": "nix",
+ "nixpkgs": "nixpkgs_2"
+ }
+ }
+ },
+ "root": "root",
+ "version": 5
+}
diff --git a/flake.nix b/flake.nix
new file mode 100644
index 000000000..859695096
--- /dev/null
+++ b/flake.nix
@@ -0,0 +1,345 @@
+{
+ description = "A Nix-based continuous build system";
+
+ edition = 201909;
+
+ inputs.nixpkgs.url = "nixpkgs/nixos-20.03";
+
+ outputs = { self, nixpkgs, nix }:
+ let
+
+ version = "${builtins.readFile ./version}.${builtins.substring 0 8 self.lastModified}.${self.shortRev or "DIRTY"}";
+
+ pkgs = import nixpkgs {
+ system = "x86_64-linux";
+ overlays = [ self.overlay nix.overlay ];
+ };
+
+ # NixOS configuration used for VM tests.
+ hydraServer =
+ { config, pkgs, ... }:
+ { imports = [ self.nixosModules.hydraTest ];
+
+ virtualisation.memorySize = 1024;
+ virtualisation.writableStore = true;
+
+ environment.systemPackages = [ pkgs.perlPackages.LWP pkgs.perlPackages.JSON ];
+
+ nix = {
+ # Without this nix tries to fetch packages from the default
+ # cache.nixos.org which is not reachable from this sandboxed NixOS test.
+ binaryCaches = [];
+ };
+ };
+
+ in rec {
+
+ # A Nixpkgs overlay that provides a 'hydra' package.
+ overlay = final: prev: {
+
+ hydra = with final; let
+
+ perlDeps = buildEnv {
+ name = "hydra-perl-deps";
+ paths = with perlPackages; lib.closePropagation
+ [ ModulePluggable
+ CatalystActionREST
+ CatalystAuthenticationStoreDBIxClass
+ CatalystDevel
+ CatalystDispatchTypeRegex
+ CatalystPluginAccessLog
+ CatalystPluginAuthorizationRoles
+ CatalystPluginCaptcha
+ CatalystPluginSessionStateCookie
+ CatalystPluginSessionStoreFastMmap
+ CatalystPluginStackTrace
+ CatalystPluginUnicodeEncoding
+ CatalystTraitForRequestProxyBase
+ CatalystViewDownload
+ CatalystViewJSON
+ CatalystViewTT
+ CatalystXScriptServerStarman
+ CatalystXRoleApplicator
+ CryptRandPasswd
+ DBDPg
+ DBDSQLite
+ DataDump
+ DateTime
+ DigestSHA1
+ EmailMIME
+ EmailSender
+ FileSlurp
+ IOCompress
+ IPCRun
+ JSON
+ JSONAny
+ JSONXS
+ LWP
+ LWPProtocolHttps
+ NetAmazonS3
+ NetPrometheus
+ NetStatsd
+ PadWalker
+ Readonly
+ SQLSplitStatement
+ SetScalar
+ Starman
+ SysHostnameLong
+ TermSizeAny
+ TestMore
+ TextDiff
+ TextTable
+ XMLSimple
+ final.nix
+ final.nix.perl-bindings
+ git
+ ];
+ };
+
+ in stdenv.mkDerivation {
+
+ name = "hydra-${version}";
+
+ src = self;
+
+ buildInputs =
+ [ makeWrapper autoconf automake libtool unzip nukeReferences pkgconfig sqlite libpqxx
+ gitAndTools.topGit mercurial darcs subversion bazaar openssl bzip2 libxslt
+ perlDeps perl final.nix
+ postgresql95 # for running the tests
+ boost
+ (if lib.versionAtLeast lib.version "20.03pre"
+ then nlohmann_json
+ else nlohmann_json.override { multipleHeaders = true; })
+ ];
+
+ hydraPath = lib.makeBinPath (
+ [ sqlite subversion openssh final.nix coreutils findutils pixz
+ gzip bzip2 lzma gnutar unzip git gitAndTools.topGit mercurial darcs gnused bazaar
+ ] ++ lib.optionals stdenv.isLinux [ rpm dpkg cdrkit ] );
+
+ configureFlags = [ "--with-docbook-xsl=${docbook_xsl}/xml/xsl/docbook" ];
+
+ shellHook = ''
+ PATH=$(pwd)/src/hydra-evaluator:$(pwd)/src/script:$(pwd)/src/hydra-eval-jobs:$(pwd)/src/hydra-queue-runner:$PATH
+ PERL5LIB=$(pwd)/src/lib:$PERL5LIB
+ '';
+
+ preConfigure = "autoreconf -vfi";
+
+ NIX_LDFLAGS = [ "-lpthread" ];
+
+ enableParallelBuilding = true;
+
+ doCheck = true;
+
+ preCheck = ''
+ patchShebangs .
+ export LOGNAME=''${LOGNAME:-foo}
+ '';
+
+ postInstall = ''
+ mkdir -p $out/nix-support
+
+ for i in $out/bin/*; do
+ read -n 4 chars < $i
+ if [[ $chars =~ ELF ]]; then continue; fi
+ wrapProgram $i \
+ --prefix PERL5LIB ':' $out/libexec/hydra/lib:$PERL5LIB \
+ --prefix PATH ':' $out/bin:$hydraPath \
+ --set HYDRA_RELEASE ${version} \
+ --set HYDRA_HOME $out/libexec/hydra \
+ --set NIX_RELEASE ${final.nix.name or "unknown"}
+ done
+ '';
+
+ dontStrip = true;
+
+ meta.description = "Build of Hydra on ${system}";
+ passthru.perlDeps = perlDeps;
+ };
+ };
+
+ hydraJobs = {
+
+ build.x86_64-linux = packages.x86_64-linux.hydra;
+
+ manual =
+ pkgs.runCommand "hydra-manual-${version}" {}
+ ''
+ mkdir -p $out/share
+ cp -prvd ${pkgs.hydra}/share/doc $out/share/
+
+ mkdir $out/nix-support
+ echo "doc manual $out/share/doc/hydra" >> $out/nix-support/hydra-build-products
+ '';
+
+ tests.install.x86_64-linux =
+ with import (nixpkgs + "/nixos/lib/testing.nix") { system = "x86_64-linux"; };
+ simpleTest {
+ machine = hydraServer;
+ testScript =
+ ''
+ $machine->waitForJob("hydra-init");
+ $machine->waitForJob("hydra-server");
+ $machine->waitForJob("hydra-evaluator");
+ $machine->waitForJob("hydra-queue-runner");
+ $machine->waitForOpenPort("3000");
+ $machine->succeed("curl --fail http://localhost:3000/");
+ '';
+ };
+
+ tests.api.x86_64-linux =
+ with import (nixpkgs + "/nixos/lib/testing.nix") { system = "x86_64-linux"; };
+ simpleTest {
+ machine = hydraServer;
+ testScript =
+ let dbi = "dbi:Pg:dbname=hydra;user=root;"; in
+ ''
+ $machine->waitForJob("hydra-init");
+
+ # Create an admin account and some other state.
+ $machine->succeed
+ ( "su - hydra -c \"hydra-create-user root --email-address 'alice\@example.org' --password foobar --role admin\""
+ , "mkdir /run/jobset /tmp/nix"
+ , "chmod 755 /run/jobset /tmp/nix"
+ , "cp ${./tests/api-test.nix} /run/jobset/default.nix"
+ , "chmod 644 /run/jobset/default.nix"
+ , "chown -R hydra /run/jobset /tmp/nix"
+ );
+
+ $machine->succeed("systemctl stop hydra-evaluator hydra-queue-runner");
+ $machine->waitForJob("hydra-server");
+ $machine->waitForOpenPort("3000");
+
+ # Run the API tests.
+ $machine->mustSucceed("su - hydra -c 'perl -I ${pkgs.hydra.perlDeps}/lib/perl5/site_perl ${./tests/api-test.pl}' >&2");
+ '';
+ };
+
+ tests.notifications.x86_64-linux =
+ with import (nixpkgs + "/nixos/lib/testing.nix") { system = "x86_64-linux"; };
+ simpleTest {
+ machine = { pkgs, ... }: {
+ imports = [ hydraServer ];
+ services.hydra-dev.extraConfig = ''
+
+ url = http://127.0.0.1:8086
+ db = hydra
+
+ '';
+ services.influxdb.enable = true;
+ };
+ testScript = ''
+ $machine->waitForJob("hydra-init");
+
+ # Create an admin account and some other state.
+ $machine->succeed
+ ( "su - hydra -c \"hydra-create-user root --email-address 'alice\@example.org' --password foobar --role admin\""
+ , "mkdir /run/jobset"
+ , "chmod 755 /run/jobset"
+ , "cp ${./tests/api-test.nix} /run/jobset/default.nix"
+ , "chmod 644 /run/jobset/default.nix"
+ , "chown -R hydra /run/jobset"
+ );
+
+ # Wait until InfluxDB can receive web requests
+ $machine->waitForJob("influxdb");
+ $machine->waitForOpenPort("8086");
+
+ # Create an InfluxDB database where hydra will write to
+ $machine->succeed(
+ "curl -XPOST 'http://127.0.0.1:8086/query' \\
+ --data-urlencode 'q=CREATE DATABASE hydra'");
+
+ # Wait until hydra-server can receive HTTP requests
+ $machine->waitForJob("hydra-server");
+ $machine->waitForOpenPort("3000");
+
+ # Setup the project and jobset
+ $machine->mustSucceed(
+ "su - hydra -c 'perl -I ${pkgs.hydra.perlDeps}/lib/perl5/site_perl ${./tests/setup-notifications-jobset.pl}' >&2");
+
+          # Wait until hydra has built the job and
+ # the InfluxDBNotification plugin uploaded its notification to InfluxDB
+ $machine->waitUntilSucceeds(
+ "curl -s -H 'Accept: application/csv' \\
+ -G 'http://127.0.0.1:8086/query?db=hydra' \\
+ --data-urlencode 'q=SELECT * FROM hydra_build_status' | grep success");
+ '';
+ };
+
+ container = nixosConfigurations.container.config.system.build.toplevel;
+ };
+
+ checks.x86_64-linux.build = hydraJobs.build.x86_64-linux;
+ checks.x86_64-linux.install = hydraJobs.tests.install.x86_64-linux;
+
+ packages.x86_64-linux.hydra = pkgs.hydra;
+ defaultPackage.x86_64-linux = pkgs.hydra;
+
+ nixosModules.hydra = {
+ imports = [ ./hydra-module.nix ];
+ nixpkgs.overlays = [ self.overlay nix.overlay ];
+ };
+
+ nixosModules.hydraTest = {
+ imports = [ self.nixosModules.hydra ];
+
+ services.hydra-dev.enable = true;
+ services.hydra-dev.hydraURL = "http://hydra.example.org";
+ services.hydra-dev.notificationSender = "admin@hydra.example.org";
+
+ systemd.services.hydra-send-stats.enable = false;
+
+ services.postgresql.enable = true;
+ services.postgresql.package = pkgs.postgresql95;
+
+ # The following is to work around the following error from hydra-server:
+ # [error] Caught exception in engine "Cannot determine local time zone"
+ time.timeZone = "UTC";
+
+ nix.extraOptions = ''
+ allowed-uris = https://github.com/
+ '';
+ };
+
+ nixosModules.hydraProxy = {
+ services.httpd = {
+ enable = true;
+ adminAddr = "hydra-admin@example.org";
+ extraConfig = ''
+
+ Order deny,allow
+ Allow from all
+
+
+ ProxyRequests Off
+ ProxyPreserveHost On
+ ProxyPass /apache-errors !
+ ErrorDocument 503 /apache-errors/503.html
+ ProxyPass / http://127.0.0.1:3000/ retry=5 disablereuse=on
+ ProxyPassReverse / http://127.0.0.1:3000/
+ '';
+ };
+ };
+
+ nixosConfigurations.container = nixpkgs.lib.nixosSystem {
+ system = "x86_64-linux";
+ modules =
+ [ self.nixosModules.hydraTest
+ self.nixosModules.hydraProxy
+ { system.configurationRevision = self.rev;
+
+ boot.isContainer = true;
+ networking.useDHCP = false;
+ networking.firewall.allowedTCPPorts = [ 80 ];
+ networking.hostName = "hydra";
+
+ services.hydra-dev.useSubstitutes = true;
+ }
+ ];
+ };
+
+ };
+}
diff --git a/hydra-module.nix b/hydra-module.nix
index 37fb951f4..7d80c0d68 100644
--- a/hydra-module.nix
+++ b/hydra-module.nix
@@ -64,7 +64,7 @@ in
package = mkOption {
type = types.path;
- #default = pkgs.hydra;
+ default = pkgs.hydra;
description = "The Hydra package.";
};
@@ -218,8 +218,6 @@ in
nix.trustedUsers = [ "hydra-queue-runner" ];
- services.hydra-dev.package = mkDefault ((import ./release.nix {}).build.x86_64-linux);
-
services.hydra-dev.extraConfig =
''
using_frontend_proxy = 1
diff --git a/release.nix b/release.nix
deleted file mode 100644
index a3e2ffb52..000000000
--- a/release.nix
+++ /dev/null
@@ -1,333 +0,0 @@
-{ hydraSrc ? builtins.fetchGit ./.
-, nixpkgs ? builtins.fetchTarball https://github.com/NixOS/nixpkgs/archive/release-19.09.tar.gz
-, officialRelease ? false
-, shell ? false
-}:
-
-with import (nixpkgs + "/lib");
-
-let
-
- pkgs = import nixpkgs {};
-
- genAttrs' = genAttrs [ "x86_64-linux" /* "i686-linux" */ ];
-
- hydraServer = hydraPkg:
- { config, pkgs, ... }:
- { imports = [ ./hydra-module.nix ];
-
- virtualisation.memorySize = 1024;
- virtualisation.writableStore = true;
-
- services.hydra-dev.enable = true;
- services.hydra-dev.package = hydraPkg;
- services.hydra-dev.hydraURL = "http://hydra.example.org";
- services.hydra-dev.notificationSender = "admin@hydra.example.org";
-
- services.postgresql.enable = true;
- services.postgresql.package = pkgs.postgresql95;
-
- environment.systemPackages = [ pkgs.perlPackages.LWP pkgs.perlPackages.JSON ];
-
- # The following is to work around the following error from hydra-server:
- # [error] Caught exception in engine "Cannot determine local time zone"
- time.timeZone = "UTC";
-
- nix = {
- # The following is to work around: https://github.com/NixOS/hydra/pull/432
- buildMachines = [
- { hostName = "localhost";
- system = "x86_64-linux";
- }
- ];
- # Without this nix tries to fetch packages from the default
- # cache.nixos.org which is not reachable from this sandboxed NixOS test.
- binaryCaches = [];
- };
- };
-
- version = builtins.readFile ./version + "." + toString hydraSrc.revCount + "." + hydraSrc.rev;
-
-in
-
-rec {
-
- build = genAttrs' (system:
- let pkgs = import nixpkgs { inherit system; }; in
-
- with pkgs;
-
- let
-
- nix = pkgs.nixUnstable or pkgs.nix;
-
- perlDeps = buildEnv {
- name = "hydra-perl-deps";
- paths = with perlPackages;
- [ ModulePluggable
- CatalystActionREST
- CatalystAuthenticationStoreDBIxClass
- CatalystDevel
- CatalystDispatchTypeRegex
- CatalystPluginAccessLog
- CatalystPluginAuthorizationRoles
- CatalystPluginCaptcha
- CatalystPluginSessionStateCookie
- CatalystPluginSessionStoreFastMmap
- CatalystPluginStackTrace
- CatalystPluginUnicodeEncoding
- CatalystTraitForRequestProxyBase
- CatalystViewDownload
- CatalystViewJSON
- CatalystViewTT
- CatalystXScriptServerStarman
- CatalystXRoleApplicator
- CryptRandPasswd
- DBDPg
- DBDSQLite
- DataDump
- DateTime
- DigestSHA1
- EmailMIME
- EmailSender
- FileSlurp
- IOCompress
- IPCRun
- JSON
- JSONAny
- JSONXS
- LWP
- LWPProtocolHttps
- NetAmazonS3
- NetPrometheus
- NetStatsd
- PadWalker
- Readonly
- SQLSplitStatement
- SetScalar
- Starman
- SysHostnameLong
- TermSizeAny
- TestMore
- TextDiff
- TextTable
- XMLSimple
- nix
- nix.perl-bindings
- git
- boehmgc
- ];
- };
-
- in
-
- releaseTools.nixBuild {
- name = "hydra-${version}";
-
- src = if shell then null else hydraSrc;
-
- buildInputs =
- [ makeWrapper autoconf automake libtool unzip nukeReferences pkgconfig sqlite libpqxx
- gitAndTools.topGit mercurial darcs subversion bazaar openssl bzip2 libxslt
- perlDeps perl nix
- postgresql95 # for running the tests
- boost
- (nlohmann_json.override { multipleHeaders = true; })
- ];
-
- hydraPath = lib.makeBinPath (
- [ sqlite subversion openssh nix coreutils findutils pixz
- gzip bzip2 lzma gnutar unzip git gitAndTools.topGit mercurial darcs gnused bazaar
- ] ++ lib.optionals stdenv.isLinux [ rpm dpkg cdrkit ] );
-
- postUnpack = optionalString (!shell) ''
- # Clean up when building from a working tree.
- (cd $sourceRoot && (git ls-files -o --directory | xargs -r rm -rfv)) || true
- '';
-
- configureFlags = [ "--with-docbook-xsl=${docbook_xsl}/xml/xsl/docbook" ];
-
- shellHook = ''
- PATH=$(pwd)/src/hydra-evaluator:$(pwd)/src/script:$(pwd)/src/hydra-eval-jobs:$(pwd)/src/hydra-queue-runner:$PATH
- ${lib.optionalString shell "PERL5LIB=$(pwd)/src/lib:$PERL5LIB"}
- '';
-
- preConfigure = "autoreconf -vfi";
-
- NIX_LDFLAGS = [ "-lpthread" ];
-
- enableParallelBuilding = true;
-
- preCheck = ''
- patchShebangs .
- export LOGNAME=''${LOGNAME:-foo}
- '';
-
- postInstall = ''
- mkdir -p $out/nix-support
-
- for i in $out/bin/*; do
- read -n 4 chars < $i
- if [[ $chars =~ ELF ]]; then continue; fi
- wrapProgram $i \
- --prefix PERL5LIB ':' $out/libexec/hydra/lib:$PERL5LIB \
- --prefix PATH ':' $out/bin:$hydraPath \
- --set HYDRA_RELEASE ${version} \
- --set HYDRA_HOME $out/libexec/hydra \
- --set NIX_RELEASE ${nix.name or "unknown"}
- done
- ''; # */
-
- dontStrip = true;
-
- meta.description = "Build of Hydra on ${system}";
- passthru.perlDeps = perlDeps;
- });
-
- manual = pkgs.runCommand "hydra-manual-${version}"
- { build = build.x86_64-linux;
- }
- ''
- mkdir -p $out/share
- cp -prvd $build/share/doc $out/share/
-
- mkdir $out/nix-support
- echo "doc manual $out/share/doc/hydra" >> $out/nix-support/hydra-build-products
- '';
-
- tests.install = genAttrs' (system:
- with import (nixpkgs + "/nixos/lib/testing.nix") { inherit system; };
- simpleTest {
- machine = hydraServer build.${system};
- testScript =
- ''
- $machine->waitForJob("hydra-init");
- $machine->waitForJob("hydra-server");
- $machine->waitForJob("hydra-evaluator");
- $machine->waitForJob("hydra-queue-runner");
- $machine->waitForOpenPort("3000");
- $machine->succeed("curl --fail http://localhost:3000/");
- '';
- });
-
- tests.api = genAttrs' (system:
- with import (nixpkgs + "/nixos/lib/testing.nix") { inherit system; };
- simpleTest {
- machine = hydraServer build.${system};
- testScript =
- let dbi = "dbi:Pg:dbname=hydra;user=root;"; in
- ''
- $machine->waitForJob("hydra-init");
-
- # Create an admin account and some other state.
- $machine->succeed
- ( "su - hydra -c \"hydra-create-user root --email-address 'alice\@example.org' --password foobar --role admin\""
- , "mkdir /run/jobset /tmp/nix"
- , "chmod 755 /run/jobset /tmp/nix"
- , "cp ${./tests/api-test.nix} /run/jobset/default.nix"
- , "chmod 644 /run/jobset/default.nix"
- , "chown -R hydra /run/jobset /tmp/nix"
- );
-
- $machine->succeed("systemctl stop hydra-evaluator hydra-queue-runner");
- $machine->waitForJob("hydra-server");
- $machine->waitForOpenPort("3000");
-
- # Run the API tests.
- $machine->mustSucceed("su - hydra -c 'perl -I ${build.${system}.perlDeps}/lib/perl5/site_perl ${./tests/api-test.pl}' >&2");
- '';
- });
-
- tests.notifications = genAttrs' (system:
- with import (nixpkgs + "/nixos/lib/testing.nix") { inherit system; };
- simpleTest {
- machine = { pkgs, ... }: {
- imports = [ (hydraServer build.${system}) ];
- services.hydra-dev.extraConfig = ''
-
- url = http://127.0.0.1:8086
- db = hydra
-
- '';
- services.influxdb.enable = true;
- };
- testScript = ''
- $machine->waitForJob("hydra-init");
-
- # Create an admin account and some other state.
- $machine->succeed
- ( "su - hydra -c \"hydra-create-user root --email-address 'alice\@example.org' --password foobar --role admin\""
- , "mkdir /run/jobset"
- , "chmod 755 /run/jobset"
- , "cp ${./tests/api-test.nix} /run/jobset/default.nix"
- , "chmod 644 /run/jobset/default.nix"
- , "chown -R hydra /run/jobset"
- );
-
- # Wait until InfluxDB can receive web requests
- $machine->waitForJob("influxdb");
- $machine->waitForOpenPort("8086");
-
- # Create an InfluxDB database where hydra will write to
- $machine->succeed(
- "curl -XPOST 'http://127.0.0.1:8086/query' \\
- --data-urlencode 'q=CREATE DATABASE hydra'");
-
- # Wait until hydra-server can receive HTTP requests
- $machine->waitForJob("hydra-server");
- $machine->waitForOpenPort("3000");
-
- # Setup the project and jobset
- $machine->mustSucceed(
- "su - hydra -c 'perl -I ${build.${system}.perlDeps}/lib/perl5/site_perl ${./tests/setup-notifications-jobset.pl}' >&2");
-
- # Wait until hydra has build the job and
- # the InfluxDBNotification plugin uploaded its notification to InfluxDB
- $machine->waitUntilSucceeds(
- "curl -s -H 'Accept: application/csv' \\
- -G 'http://127.0.0.1:8086/query?db=hydra' \\
- --data-urlencode 'q=SELECT * FROM hydra_build_status' | grep success");
- '';
- });
-
- /*
- tests.s3backup = genAttrs' (system:
- with import (nixpkgs + "/nixos/lib/testing.nix") { inherit system; };
- let hydra = build.${system}
- simpleTest {
- machine =
- { config, pkgs, ... }:
- { services.postgresql.enable = true;
- services.postgresql.package = pkgs.postgresql95;
- environment.systemPackages = [ hydra pkgs.rubyLibs.fakes3 ];
- virtualisation.memorySize = 2047;
- boot.kernelPackages = pkgs.linuxPackages_3_10;
- virtualisation.writableStore = true;
- networking.extraHosts = ''
- 127.0.0.1 hydra.s3.amazonaws.com
- '';
- };
-
- testScript =
- ''
- $machine->waitForJob("postgresql");
-
- # Initialise the database and the state.
- $machine->succeed
- ( "createdb -O root hydra"
- , "psql hydra -f ${hydra}/libexec/hydra/sql/hydra-postgresql.sql"
- , "mkdir /var/lib/hydra"
- , "mkdir /tmp/jobs"
- , "cp ${./tests/s3-backup-test.pl} /tmp/s3-backup-test.pl"
- , "cp ${./tests/api-test.nix} /tmp/jobs/default.nix"
- );
-
- # start fakes3
- $machine->succeed("fakes3 --root /tmp/s3 --port 80 &>/dev/null &");
- $machine->waitForOpenPort("80");
-
- $machine->succeed("cd /tmp && LOGNAME=root AWS_ACCESS_KEY_ID=foo AWS_SECRET_ACCESS_KEY=bar HYDRA_DBI='dbi:Pg:dbname=hydra;user=root;' HYDRA_CONFIG=${./tests/s3-backup-test.config} perl -I ${hydra}/libexec/hydra/lib -I ${hydra.perlDeps}/lib/perl5/site_perl ./s3-backup-test.pl >&2");
- '';
- });
- */
-}
diff --git a/shell.nix b/shell.nix
index 454c00bff..db84e3d07 100644
--- a/shell.nix
+++ b/shell.nix
@@ -1 +1,3 @@
-(import ./release.nix { shell = true; }).build.x86_64-linux
+(import (fetchTarball https://github.com/edolstra/flake-compat/archive/master.tar.gz) {
+ src = builtins.fetchGit ./.;
+}).shellNix
diff --git a/src/hydra-eval-jobs/hydra-eval-jobs.cc b/src/hydra-eval-jobs/hydra-eval-jobs.cc
index de994b396..f3367e11a 100644
--- a/src/hydra-eval-jobs/hydra-eval-jobs.cc
+++ b/src/hydra-eval-jobs/hydra-eval-jobs.cc
@@ -9,6 +9,8 @@
#include "get-drvs.hh"
#include "globals.hh"
#include "common-eval-args.hh"
+#include "flake/flakeref.hh"
+#include "flake/flake.hh"
#include "attr-path.hh"
#include "derivations.hh"
@@ -28,6 +30,7 @@ static size_t maxMemorySize;
struct MyArgs : MixEvalArgs, MixCommonArgs
{
Path releaseExpr;
+ bool flake = false;
bool dryRun = false;
MyArgs() : MixCommonArgs("hydra-eval-jobs")
@@ -51,6 +54,11 @@ struct MyArgs : MixEvalArgs, MixCommonArgs
.description("don't create store derivations")
.set(&dryRun, true);
+ mkFlag()
+ .longName("flake")
+ .description("build a flake")
+ .set(&flake, true);
+
expectArg("expr", &releaseExpr);
}
};
@@ -89,7 +97,37 @@ static void worker(
AutoCloseFD & from)
{
Value vTop;
- state.evalFile(lookupFileArg(state, myArgs.releaseExpr), vTop);
+
+ if (myArgs.flake) {
+ using namespace flake;
+
+ auto flakeRef = parseFlakeRef(myArgs.releaseExpr);
+
+ auto vFlake = state.allocValue();
+
+ auto lockedFlake = lockFlake(state, flakeRef,
+ LockFlags {
+ .updateLockFile = false,
+ .useRegistries = false,
+ .allowMutable = false,
+ });
+
+ callFlake(state, lockedFlake, *vFlake);
+
+ auto vOutputs = vFlake->attrs->get(state.symbols.create("outputs"))->value;
+ state.forceValue(*vOutputs);
+
+ auto aHydraJobs = vOutputs->attrs->get(state.symbols.create("hydraJobs"));
+ if (!aHydraJobs)
+ aHydraJobs = vOutputs->attrs->get(state.symbols.create("checks"));
+ if (!aHydraJobs)
+ throw Error("flake '%s' does not provide any Hydra jobs or checks", flakeRef);
+
+ vTop = *aHydraJobs->value;
+
+ } else {
+ state.evalFile(lookupFileArg(state, myArgs.releaseExpr), vTop);
+ }
auto vRoot = state.allocValue();
state.autoCallFunction(autoArgs, vTop, *vRoot);
@@ -109,7 +147,7 @@ static void worker(
nlohmann::json reply;
try {
- auto vTmp = findAlongAttrPath(state, attrPath, autoArgs, *vRoot);
+ auto vTmp = findAlongAttrPath(state, attrPath, autoArgs, *vRoot).first;
auto v = state.allocValue();
state.autoCallFunction(autoArgs, *vTmp, *v);
@@ -139,23 +177,23 @@ static void worker(
/* If this is an aggregate, then get its constituents. */
auto a = v->attrs->get(state.symbols.create("_hydraAggregate"));
- if (a && state.forceBool(*(*a)->value, *(*a)->pos)) {
+ if (a && state.forceBool(*a->value, *a->pos)) {
auto a = v->attrs->get(state.symbols.create("constituents"));
if (!a)
throw EvalError("derivation must have a ‘constituents’ attribute");
PathSet context;
- state.coerceToString(*(*a)->pos, *(*a)->value, context, true, false);
+ state.coerceToString(*a->pos, *a->value, context, true, false);
for (auto & i : context)
if (i.at(0) == '!') {
size_t index = i.find("!", 1);
job["constituents"].push_back(string(i, index + 1));
}
- state.forceList(*(*a)->value, *(*a)->pos);
- for (unsigned int n = 0; n < (*a)->value->listSize(); ++n) {
- auto v = (*a)->value->listElems()[n];
+ state.forceList(*a->value, *a->pos);
+ for (unsigned int n = 0; n < a->value->listSize(); ++n) {
+ auto v = a->value->listElems()[n];
state.forceValue(*v);
if (v->type == tString)
job["namedConstituents"].push_back(state.forceStringNoCtx(*v));
@@ -245,6 +283,10 @@ int main(int argc, char * * argv)
to the environment. */
evalSettings.restrictEval = true;
+    /* When building a flake, use pure evaluation (no access to
+       'getEnv', 'currentSystem', etc.). */
+ evalSettings.pureEval = myArgs.flake;
+
if (myArgs.dryRun) settings.readOnlyMode = true;
if (myArgs.releaseExpr == "") throw UsageError("no expression specified");
diff --git a/src/hydra-evaluator/hydra-evaluator.cc b/src/hydra-evaluator/hydra-evaluator.cc
index e6fc9a145..a2552b8a5 100644
--- a/src/hydra-evaluator/hydra-evaluator.cc
+++ b/src/hydra-evaluator/hydra-evaluator.cc
@@ -103,7 +103,7 @@ struct Evaluator
}
if (evalOne && seen.empty()) {
- printError("the specified jobset does not exist");
+ printError("the specified jobset does not exist or is disabled");
std::_Exit(1);
}
@@ -458,14 +458,15 @@ int main(int argc, char * * argv)
return true;
});
- if (!args.empty()) {
-        if (args.size() != 2) throw UsageError("Syntax: hydra-evaluator [<project> <jobset>]");
- evaluator.evalOne = JobsetName(args[0], args[1]);
- }
if (unlock)
evaluator.unlock();
- else
+ else {
+ if (!args.empty()) {
+            if (args.size() != 2) throw UsageError("Syntax: hydra-evaluator [<project> <jobset>]");
+ evaluator.evalOne = JobsetName(args[0], args[1]);
+ }
evaluator.run();
+ }
});
}
diff --git a/src/lib/Hydra/Controller/Jobset.pm b/src/lib/Hydra/Controller/Jobset.pm
index 5ce4aab4d..b4c20e03f 100644
--- a/src/lib/Hydra/Controller/Jobset.pm
+++ b/src/lib/Hydra/Controller/Jobset.pm
@@ -223,7 +223,19 @@ sub updateJobset {
error($c, "Cannot rename jobset to ‘$jobsetName’ since that identifier is already taken.")
if $jobsetName ne $oldName && defined $c->stash->{project}->jobsets->find({ name => $jobsetName });
- my ($nixExprPath, $nixExprInput) = nixExprPathFromParams $c;
+ my $type = int($c->stash->{params}->{"type"}) // 0;
+
+ my ($nixExprPath, $nixExprInput);
+ my $flake;
+
+ if ($type == 0) {
+ ($nixExprPath, $nixExprInput) = nixExprPathFromParams $c;
+ } elsif ($type == 1) {
+ $flake = trim($c->stash->{params}->{"flakeref"});
+ error($c, "Invalid flake URI ‘$flake’.") if $flake !~ /^[a-zA-Z]/;
+ } else {
+ error($c, "Invalid jobset type.");
+ }
my $enabled = int($c->stash->{params}->{enabled});
die if $enabled < 0 || $enabled > 3;
@@ -246,6 +258,8 @@ sub updateJobset {
, checkinterval => $checkinterval
, triggertime => ($enabled && $checkinterval > 0) ? $jobset->triggertime // time() : undef
, schedulingshares => $shares
+ , type => $type
+ , flake => $flake
});
$jobset->project->jobsetrenames->search({ from_ => $jobsetName })->delete;
@@ -255,23 +269,25 @@ sub updateJobset {
# Set the inputs of this jobset.
$jobset->jobsetinputs->delete;
- foreach my $name (keys %{$c->stash->{params}->{inputs}}) {
- my $inputData = $c->stash->{params}->{inputs}->{$name};
- my $type = $inputData->{type};
- my $value = $inputData->{value};
- my $emailresponsible = defined $inputData->{emailresponsible} ? 1 : 0;
+ if ($type == 0) {
+ foreach my $name (keys %{$c->stash->{params}->{inputs}}) {
+ my $inputData = $c->stash->{params}->{inputs}->{$name};
+ my $type = $inputData->{type};
+ my $value = $inputData->{value};
+ my $emailresponsible = defined $inputData->{emailresponsible} ? 1 : 0;
- error($c, "Invalid input name ‘$name’.") unless $name =~ /^[[:alpha:]][\w-]*$/;
- error($c, "Invalid input type ‘$type’.") unless defined $c->stash->{inputTypes}->{$type};
+ error($c, "Invalid input name ‘$name’.") unless $name =~ /^[[:alpha:]][\w-]*$/;
+ error($c, "Invalid input type ‘$type’.") unless defined $c->stash->{inputTypes}->{$type};
- my $input = $jobset->jobsetinputs->create(
- { name => $name,
- type => $type,
- emailresponsible => $emailresponsible
- });
+ my $input = $jobset->jobsetinputs->create(
+ { name => $name,
+ type => $type,
+ emailresponsible => $emailresponsible
+ });
- $value = checkInputValue($c, $name, $type, $value);
- $input->jobsetinputalts->create({altnr => 0, value => $value});
+ $value = checkInputValue($c, $name, $type, $value);
+ $input->jobsetinputalts->create({altnr => 0, value => $value});
+ }
}
}
diff --git a/src/lib/Hydra/Helper/AddBuilds.pm b/src/lib/Hydra/Helper/AddBuilds.pm
index 296afbc54..394cd42a5 100644
--- a/src/lib/Hydra/Helper/AddBuilds.pm
+++ b/src/lib/Hydra/Helper/AddBuilds.pm
@@ -30,6 +30,8 @@ sub updateDeclarativeJobset {
my @allowed_keys = qw(
enabled
hidden
+ type
+ flake
description
nixexprinput
nixexprpath
diff --git a/src/root/build.tt b/src/root/build.tt
index 1cd6da923..a18ad16e8 100644
--- a/src/root/build.tt
+++ b/src/root/build.tt
@@ -120,7 +120,7 @@ END;