diff --git a/t/10-pluggable.t b/t/10-pluggable.t
index 67109a4b..f85967a9 100644
--- a/t/10-pluggable.t
+++ b/t/10-pluggable.t
@@ -2,28 +2,27 @@ use strict;
 use warnings;
 use Test::More tests => 16;
 use Test::Exception;
-use Cwd;
-use List::Util qw(none any);
+use Cwd qw(getcwd abs_path);
+use List::Util qw(any);
 use Log::Log4perl qw(:levels);
-use File::Copy qw(cp);
-use English;
-
-use npg_tracking::util::abs_path qw(abs_path);
-use t::util;
+use File::Temp qw(tempdir);
+use English qw(-no_match_vars);
+use File::Copy::Recursive qw(dircopy fmove fcopy);
 
 use_ok('npg_pipeline::pluggable');
 
-my $util = t::util->new();
-my $test_dir = $util->temp_directory();
+my $test_dir = tempdir(CLEANUP => 1);
 
-my @tools = map { "$test_dir/$_" } qw/bamtofastq blat norm_fit/;
+my $test_bin = join q[/], $test_dir, q[bin];
+mkdir $test_bin;
+my @tools = map { "$test_bin/$_" } qw/bamtofastq blat norm_fit/;
 foreach my $tool (@tools) {
   open my $fh, '>', $tool or die 'cannot open file for writing';
   print $fh $tool or die 'cannot print';
   close $fh or warn 'failed to close file handle';
 }
 chmod 0755, @tools;
-local $ENV{'PATH'} = join q[:], $test_dir, $ENV{'PATH'};
+local $ENV{'PATH'} = join q[:], $test_bin, $ENV{'PATH'};
 
 Log::Log4perl->easy_init({layout => '%d %-5p %c - %m%n',
                           level  => $DEBUG,
@@ -32,15 +31,36 @@ Log::Log4perl->easy_init({layout => '%d %-5p %c - %m%n',
 
 my $product_config = q[t/data/release/config/archive_on/product_release.yml];
 my $config_dir = 'data/config_files';
-my $runfolder_path = $util->analysis_runfolder_path;
-$util->create_analysis();
+
+my $rf_name = q[210415_A00971_0162_AHNNTMDSXY];
+my $test_rf = q[t/data/novaseq/] . $rf_name;
+my $analysis_dir = join q[/], $test_dir,
+  q[esa-sv-20201215-02/IL_seq_data/analysis];
+my $runfolder_path = join q[/], $analysis_dir, $rf_name;
+dircopy($test_rf, $runfolder_path);
+my $bbcals_relative = q[Data/Intensities/BAM_basecalls_20210417-080715];
+my $nocall_relative = $bbcals_relative . q[/no_cal];
+my $nocall_path = join q[/], $runfolder_path, $nocall_relative;
+mkdir $nocall_path;
+symlink $nocall_path, "$runfolder_path/Latest_Summary";
+
+my $id_run = 37416;
+for my $file (qw(RunInfo.xml RunParameters.xml)) {
+  my $source = join q[/], $runfolder_path, "${id_run}_${file}";
+  my $target = join q[/], $runfolder_path, $file;
+  fmove($source, $target);
+}
+
+my $samplesheet_path = join q[/], $runfolder_path, $bbcals_relative,
+  q[metadata_cache_37416], q[samplesheet_37416.csv];
 
 subtest 'object with no function order set - simple methods' => sub {
   plan tests => 7;
 
   my $pluggable = npg_pipeline::pluggable->new(
-    id_run => 1234,
-    runfolder_path => $test_dir
+    id_run              => 1234,
+    runfolder_path      => $test_dir,
+    npg_tracking_schema => undef
   );
   isa_ok($pluggable, q{npg_pipeline::pluggable});
   is($pluggable->_pipeline_name, '10-pluggable.t', 'pipeline name');
@@ -56,17 +76,19 @@ subtest 'graph creation from jgf files' => sub {
   plan tests => 2;
 
   my $obj = npg_pipeline::pluggable->new(
-    id_run => 1234,
-    runfolder_path => $test_dir,
-    function_list => "$config_dir/function_list_central.json"
+    id_run              => 1234,
+    runfolder_path      => $test_dir,
+    function_list       => "$config_dir/function_list_central.json",
+    npg_tracking_schema => undef
   );
   lives_ok {$obj->function_graph()}
     'no error creating a graph for default analysis';
 
   $obj = npg_pipeline::pluggable->new(
-    id_run => 1234,
-    runfolder_path => $test_dir,
-    function_list => "$config_dir/function_list_post_qc_review.json"
+    id_run              => 1234,
+    runfolder_path      => $test_dir,
+    function_list       => "$config_dir/function_list_post_qc_review.json",
+    npg_tracking_schema => undef
  );
   lives_ok { $obj->function_graph() }
     'no error creating a graph for default archival';
@@ -79,7 +101,8 @@ subtest 'graph creation from explicitly given function list' => sub {
     id_run => 1234,
     runfolder_path => $runfolder_path,
     function_order => [qw/run_analysis_in_progress lane_analysis_in_progress/],
-    function_list => "$config_dir/function_list_central.json"
+    function_list => "$config_dir/function_list_central.json",
+    npg_tracking_schema => undef
   );
   ok($obj->has_function_order(), 'function order is set');
   is(join(q[ ], @{$obj->function_order}), 'run_analysis_in_progress lane_analysis_in_progress',
@@ -110,31 +133,32 @@
   is (scalar @p, 1, 'one predecessor');
 
   $obj = npg_pipeline::pluggable->new(
-    id_run => 1234,
-    function_order => [qw/pipeline_end/],
-    runfolder_path => $test_dir,
-    function_list => "$config_dir/function_list_central.json"
+    id_run              => 1234,
+    function_order      => [qw/pipeline_end/],
+    runfolder_path      => $test_dir,
+    function_list       => "$config_dir/function_list_central.json",
+    npg_tracking_schema => undef
  );
   throws_ok { $obj->function_graph() }
     qr/Graph is not DAG/,
     'pipeline_end cannot be specified in function order';
 
   $obj = npg_pipeline::pluggable->new(
-    id_run => 1234,
-    function_order => [qw/pipeline_start/],
-    runfolder_path => $test_dir,
-    no_bsub => 1,
-    function_list => "$config_dir/function_list_central.json"
+    function_order      => [qw/pipeline_start/],
+    runfolder_path      => $test_dir,
+    no_bsub             => 1,
+    function_list       => "$config_dir/function_list_central.json",
+    npg_tracking_schema => undef
  );
   throws_ok { $obj->function_graph() }
     qr/Graph is not DAG/,
     'pipeline_start cannot be specified in function order';
 
   $obj = npg_pipeline::pluggable->new(
-    id_run => 1234,
-    function_order => ['invalid_function'],
-    runfolder_path => $test_dir,
-    function_list => "$config_dir/function_list_central.json"
+    function_order      => ['invalid_function'],
+    runfolder_path      => $test_dir,
+    function_list       => "$config_dir/function_list_central.json",
+    npg_tracking_schema => undef
  );
   throws_ok {$obj->function_graph()}
     qr/Function invalid_function cannot be found in the graph/;
@@ -147,7 +171,8 @@ subtest 'switching off functions' => sub {
     runfolder_path => $runfolder_path,
     no_irods_archival => 1,
     no_warehouse_update => 1,
-    function_list => "$config_dir/function_list_central.json"
+    function_list => "$config_dir/function_list_central.json",
+    npg_tracking_schema => undef
   );
 
   lives_ok { $p->function_graph } 'A graph!';
@@ -164,9 +189,10 @@
     'update to warehouse switched off');
 
   $p = npg_pipeline::pluggable->new(
-    runfolder_path => $runfolder_path,
-    local => 1,
-    function_list => "$config_dir/function_list_central.json"
+    runfolder_path      => $runfolder_path,
+    local               => 1,
+    function_list       => "$config_dir/function_list_central.json",
+    npg_tracking_schema => undef
  );
   ok(($p->_run_function($fn_name_id, $fn_name_id)->[0]->excluded &&
       $p->_run_function($fn_ml_name_id, $fn_ml_name_id)->[0]->excluded),
@@ -177,7 +203,8 @@
     runfolder_path => $runfolder_path,
     local => 1,
     no_warehouse_update => 0,
-    function_list => "$config_dir/function_list_central.json"
+    function_list => "$config_dir/function_list_central.json",
+    npg_tracking_schema => undef
  );
   ok(($p->_run_function($fn_name_id, $fn_name_id)->[0]->excluded &&
       $p->_run_function($fn_ml_name_id, $fn_ml_name_id)->[0]->excluded),
@@ -196,18 +223,19 @@ subtest 'specifying functions via function_order' => sub {
   );
 
   local $ENV{'PATH'} = join q[:], 't/bin', $ENV{'PATH'}; # mock LSF clients
-  local $ENV{NPG_CACHED_SAMPLESHEET_FILE} = q[t/data/samplesheet_1234.csv];
+  local $ENV{NPG_CACHED_SAMPLESHEET_FILE} = $samplesheet_path;
   my $p = npg_pipeline::pluggable->new(
-    function_order => \@functions_in_order,
-    runfolder_path => $runfolder_path,
-    spider => 0,
-    no_sf_resource => 1,
-    no_bsub => 0,
-    is_indexed => 0,
+    function_order         => \@functions_in_order,
+    runfolder_path         => $runfolder_path,
+    spider                 => 0,
+    no_sf_resource         => 1,
+    no_bsub                => 0,
+    is_indexed             => 0,
     product_conf_file_path => $product_config,
-    function_list => "$config_dir/function_list_post_qc_review.json"
+    function_list          => "$config_dir/function_list_post_qc_review.json",
+    npg_tracking_schema    => undef
   );
-  is($p->id_run, 1234, 'run id set correctly');
+  is($p->id_run, $id_run, 'run id set correctly');
   is($p->is_indexed, 0, 'is not indexed');
   is(join(q[ ], @{$p->function_order()}), join(q[ ], @functions_in_order),
     q{function_order set on creation});
@@ -218,12 +246,13 @@ subtest 'creating executor object' => sub {
   plan tests => 13;
 
   my $ref = {
-    function_order => [qw/run_archival_in_progress/],
-    runfolder_path => $runfolder_path,
-    bam_basecall_path => $runfolder_path,
-    spider => 0,
+    function_order         => [qw/run_archival_in_progress/],
+    runfolder_path         => $runfolder_path,
+    bam_basecall_path      => $runfolder_path,
+    spider                 => 0,
     product_conf_file_path => $product_config,
-    function_list => "$config_dir/function_list_post_qc_review.json"
+    function_list          => "$config_dir/function_list_post_qc_review.json",
+    npg_tracking_schema    => undef
   };
 
   my $p = npg_pipeline::pluggable->new($ref);
@@ -246,7 +275,8 @@ subtest 'creating executor object' => sub {
     my $ex = $pl->executor();
     isa_ok ($ex, 'npg_pipeline::executor::' . $etype);
     ok (!$ex->has_analysis_path, 'analysis path is not set');
-    my $path1 = join q[],$runfolder_path,'/t_10-pluggable.t_1234_',$pl->timestamp, q[-];
+    my $path1 = join q[],$runfolder_path, "/t_10-pluggable.t_${id_run}_",
+      $pl->timestamp, q[-];
     my $path2 = join q[], '.commands4', uc $etype, 'jobs.', $etype eq 'lsf' ? 'json' : 'txt';
     like ($ex->commands4jobs_file_path(), qr/\A$path1(\d+)$path2\Z/,
       'file path to save commands for jobs');
@@ -263,7 +293,7 @@ subtest 'propagating options to the lsf executor' => sub {
     run_qc_complete
   );
 
-  local $ENV{NPG_CACHED_SAMPLESHEET_FILE} = q[t/data/samplesheet_1234.csv];
+  local $ENV{NPG_CACHED_SAMPLESHEET_FILE} = $samplesheet_path;
 
   my $ref = {
     function_order => \@functions_in_order,
@@ -324,19 +354,19 @@ subtest 'running the pipeline (lsf executor)' => sub {
   };
 
   my $p = npg_pipeline::pluggable->new($ref);
-  local $ENV{NPG_CACHED_SAMPLESHEET_FILE} = q[t/data/samplesheet_1234.csv];
+  local $ENV{NPG_CACHED_SAMPLESHEET_FILE} = $samplesheet_path;
   lives_ok { $p->main(); } q{no error running main without execution };
 
   $ref->{'execute'} = 1;
   $ref->{'no_bsub'} = 1;
   $p = npg_pipeline::pluggable->new($ref);
-  local $ENV{NPG_CACHED_SAMPLESHEET_FILE} = q[t/data/samplesheet_1234.csv];
+  local $ENV{NPG_CACHED_SAMPLESHEET_FILE} = $samplesheet_path;
   lives_ok { $p->main(); } q{no error running main in no_bsub mode};
 
   $ref->{'no_bsub'} = 0;
   local $ENV{'PATH'} = join q[:], 't/bin', $ENV{'PATH'}; # mock LSF clients
   $p = npg_pipeline::pluggable->new($ref);
-  local $ENV{NPG_CACHED_SAMPLESHEET_FILE} = q[t/data/samplesheet_1234.csv];
+  local $ENV{NPG_CACHED_SAMPLESHEET_FILE} = $samplesheet_path;
   lives_ok { $p->main(); } q{no error running main with mock LSF client};
 
   # soft-link bresume command to /bin/false so that it fails
@@ -344,19 +374,19 @@ subtest 'running the pipeline (lsf executor)' => sub {
   mkdir $bin;
   symlink '/bin/false', "$bin/bresume";
   local $ENV{'PATH'} = join q[:], $bin, $ENV{'PATH'};
-  local $ENV{NPG_CACHED_SAMPLESHEET_FILE} = q[t/data/samplesheet_1234.csv];
+  local $ENV{NPG_CACHED_SAMPLESHEET_FILE} = $samplesheet_path;
   throws_ok { npg_pipeline::pluggable->new($ref)->main() }
     qr/Failed to submit command to LSF/, q{error running main};
 
   $ref->{'interactive'} = 1;
-  local $ENV{NPG_CACHED_SAMPLESHEET_FILE} = q[t/data/samplesheet_1234.csv];
+  local $ENV{NPG_CACHED_SAMPLESHEET_FILE} = $samplesheet_path;
   lives_ok { npg_pipeline::pluggable->new($ref)->main() }
     'no failure in interactive mode';
   $ref->{'interactive'} = 0;
 
   # soft-link bkill command to /bin/false so that it fails
   symlink '/bin/false', "$bin/bkill";
-  local $ENV{NPG_CACHED_SAMPLESHEET_FILE} = q[t/data/samplesheet_1234.csv];
+  local $ENV{NPG_CACHED_SAMPLESHEET_FILE} = $samplesheet_path;
   throws_ok { npg_pipeline::pluggable->new($ref)->main() }
     qr/Failed to submit command to LSF/, q{error running main};
 };
@@ -389,17 +419,17 @@ subtest 'running the pipeline (wr executor)' => sub {
   local $ENV{'PATH'} = join q[:], $bin, $ENV{'PATH'};
 
   my $p = npg_pipeline::pluggable->new($ref);
-  local $ENV{NPG_CACHED_SAMPLESHEET_FILE} = q[t/data/samplesheet_1234.csv];
+  local $ENV{NPG_CACHED_SAMPLESHEET_FILE} = $samplesheet_path;
   lives_ok { $p->main(); } q{no error running main without execution };
 
   $ref->{'execute'} = 1;
-  local $ENV{NPG_CACHED_SAMPLESHEET_FILE} = q[t/data/samplesheet_1234.csv];
+  local $ENV{NPG_CACHED_SAMPLESHEET_FILE} = $samplesheet_path;
   throws_ok { npg_pipeline::pluggable->new($ref)->main() }
     qr/Error submitting for execution: Error submitting wr jobs/,
     q{error running main};
 
   $ref->{'interactive'} = 1;
-  local $ENV{NPG_CACHED_SAMPLESHEET_FILE} = q[t/data/samplesheet_1234.csv];
+  local $ENV{NPG_CACHED_SAMPLESHEET_FILE} = $samplesheet_path;
   lives_ok { npg_pipeline::pluggable->new($ref)->main() }
     q{interactive mode, no error running main};
 
@@ -407,11 +437,11 @@ subtest 'running the pipeline (wr executor)' => sub {
   unlink $wr;
   symlink '/bin/true', $wr;
   $ref->{'interactive'} = 0;
-  local $ENV{NPG_CACHED_SAMPLESHEET_FILE} = q[t/data/samplesheet_1234.csv];
+  local $ENV{NPG_CACHED_SAMPLESHEET_FILE} = $samplesheet_path;
   lives_ok { npg_pipeline::pluggable->new($ref)->main() } q{no error running main};
 
   $ref->{'job_name_prefix'} = 'test';
-  local $ENV{NPG_CACHED_SAMPLESHEET_FILE} = q[t/data/samplesheet_1234.csv];
+  local $ENV{NPG_CACHED_SAMPLESHEET_FILE} = $samplesheet_path;
   lives_ok { npg_pipeline::pluggable->new($ref)->main() }
     q{job name prefix is set, no error running main};
 };
@@ -419,80 +449,61 @@
 subtest 'positions and spidering' => sub {
   plan tests => 9;
 
-  cp 't/data/run_params/runParameters.hiseq.xml',
-    join(q[/], $runfolder_path, 'runParameters.xml')
-    or die 'Faile to copy run params file';
-
   local $ENV{'PATH'} = join q[:], 't/bin', $ENV{'PATH'}; # mock LSF clients
-  local $ENV{NPG_CACHED_SAMPLESHEET_FILE} = q[t/data/samplesheet_1234.csv];
+  local $ENV{NPG_CACHED_SAMPLESHEET_FILE} = $samplesheet_path;
 
   my $p = npg_pipeline::pluggable->new(
-    id_run => 1234,
-    id_flowcell_lims => 2015,
-    run_folder => q{123456_IL2_1234},
-    runfolder_path => $runfolder_path,
-    spider => 0,
-    function_list => "$config_dir/function_list_central.json"
+    id_flowcell_lims => 2015,
+    runfolder_path   => $runfolder_path,
+    spider           => 0,
+    function_list    => "$config_dir/function_list_central.json"
  );
   ok(!$p->spider, 'spidering is off');
-  is (join( q[ ], $p->positions), '1 2 3 4 5 6 7 8', 'positions array');
+  is (join( q[ ], $p->positions), '1 2 3 4', 'positions array');
 
-  local $ENV{NPG_CACHED_SAMPLESHEET_FILE} = q[t/data/samplesheet_1234.csv];
+  local $ENV{NPG_CACHED_SAMPLESHEET_FILE} = $samplesheet_path;
   my $function = 'run_analysis_complete';
   $p = npg_pipeline::pluggable->new(
-    id_run => 1234,
-    id_flowcell_lims => 2015,
-    run_folder => q{123456_IL2_1234},
-    function_order => [$function],
-    runfolder_path => $runfolder_path,
-    lanes => [1,2],
-    spider => 0,
-    no_sf_resource => 1,
-    product_conf_file_path => $product_config,
-    function_list => "$config_dir/function_list_central.json"
+    id_flowcell_lims       => 2015,
+    function_order         => [$function],
+    runfolder_path         => $runfolder_path,
+    lanes                  => [1,2],
+    spider                 => 0,
+    no_sf_resource         => 1,
+    product_conf_file_path => $product_config,
+    function_list          => "$config_dir/function_list_central.json"
  );
   is (join( q[ ], $p->positions), '1 2', 'positions array');
   ok(!$p->interactive, 'start job will be resumed');
   lives_ok { $p->main() } "running main for $function, non-interactively";
 
-  local $ENV{NPG_CACHED_SAMPLESHEET_FILE} = q[t/data/samplesheet_1234.csv];
+  local $ENV{NPG_CACHED_SAMPLESHEET_FILE} = $samplesheet_path;
   $p = npg_pipeline::pluggable->new(
-    id_run => 1234,
-    id_flowcell_lims => 2015,
-    run_folder => q{123456_IL2_1234},
-    function_order => [$function],
-    runfolder_path => $runfolder_path,
-    lanes => [1,2],
-    interactive => 1,
-    spider => 0,
-    no_sf_resource => 1,
-    product_conf_file_path => $product_config,
-    function_list => "$config_dir/function_list_central.json"
+    id_flowcell_lims       => 2015,
+    function_order         => [$function],
+    runfolder_path         => $runfolder_path,
+    lanes                  => [1,2],
+    interactive            => 1,
+    spider                 => 0,
+    no_sf_resource         => 1,
+    product_conf_file_path => $product_config,
+    function_list          => "$config_dir/function_list_central.json"
  );
   ok($p->interactive, 'start job will not be resumed');
   lives_ok { $p->main() } "running main for $function, interactively";
 
-  local $ENV{NPG_CACHED_SAMPLESHEET_FILE} = q[t/data/samplesheet_1234.csv];
-  $util->create_analysis();
-  cp 't/data/run_params/runParameters.hiseq.xml',
-    join(q[/], $runfolder_path, 'runParameters.xml')
-    or die 'Faile to copy run params file';
-
-  $util->create_run_info();
-
+  local $ENV{NPG_CACHED_SAMPLESHEET_FILE} = $samplesheet_path;
  $p = npg_pipeline::pluggable->new(
-    id_run => 1234,
-    run_folder => q{123456_IL2_1234},
-    function_order => [qw{qc_qX_yield qc_adapter qc_insert_size}],
-    lanes => [4],
-    runfolder_path => $runfolder_path,
-    no_bsub => 1,
-    repository => q{t/data/sequence},
-    id_flowcell_lims => 2015,
-    spider => 0,
-    no_sf_resource => 1,
-    product_conf_file_path => $product_config,
-    function_list => "$config_dir/function_list_central.json"
+    function_order         => [qw{qc_qX_yield qc_adapter qc_insert_size}],
+    lanes                  => [4],
+    runfolder_path         => $runfolder_path,
+    no_bsub                => 1,
+    repository             => q{t/data/sequence},
+    id_flowcell_lims       => 2015,
+    spider                 => 0,
+    no_sf_resource         => 1,
+    product_conf_file_path => $product_config,
+    function_list          => "$config_dir/function_list_central.json"
  );
   mkdir $p->archive_path;
   is (join( q[ ], $p->positions), '4', 'positions array');
@@ -542,7 +553,7 @@ subtest 'script name, pipeline name and function list' => sub {
     qr/Bad function list name: $test_path/,
     'error when function list does not exist, neither it can be interpreted as a function list name';
 
-  cp $path, $test_dir;
+  fcopy($path, $test_dir);
 
   $path = $test_dir . '/function_list_post_qc_review.json';
   $base = npg_pipeline::pluggable->new(function_list => $path);
@@ -563,13 +574,11 @@
 subtest 'log file name, directory and path' => sub {
   plan tests => 18;
 
-  my $log_name_re = qr/t_10-pluggable\.t_1234_02122020-\d+\.log/;
+  my $log_name_re = qr/t_10-pluggable\.t_${id_run}_02122020-\d+\.log/;
 
   my $p = npg_pipeline::pluggable->new(
-    id_run => 1234,
-    run_folder => q{123456_IL2_1234},
-    runfolder_path => $runfolder_path,
-    timestamp => '02122020',
+    runfolder_path => $runfolder_path,
+    timestamp      => '02122020',
  );
   like ($p->log_file_name, $log_name_re, 'log file name is built correctly');
   is ($p->log_file_dir, $runfolder_path, 'default for the log directory');
@@ -577,11 +586,9 @@
     'default log file path');
 
   $p = npg_pipeline::pluggable->new(
-    id_run => 1234,
-    run_folder => q{123456_IL2_1234},
-    runfolder_path => $runfolder_path,
-    timestamp => '02122020',
-    log_file_name => 'custom.log',
+    runfolder_path => $runfolder_path,
+    timestamp      => '02122020',
+    log_file_name  => 'custom.log',
  );
   is ($p->log_file_name, 'custom.log', 'log file name as set');
   is ($p->log_file_dir, $runfolder_path, 'default for the log directory');
@@ -589,11 +596,9 @@
     'custom log file path');
 
   $p = npg_pipeline::pluggable->new(
-    id_run => 1234,
-    run_folder => q{123456_IL2_1234},
-    runfolder_path => $runfolder_path,
-    timestamp => '02122020',
-    log_file_dir => "$runfolder_path/custom",
+    runfolder_path => $runfolder_path,
+    timestamp      => '02122020',
+    log_file_dir   => "$runfolder_path/custom",
  );
   like ($p->log_file_name, $log_name_re, 'default log file name');
   is ($p->log_file_dir, "$runfolder_path/custom", 'log directory as set');
@@ -601,12 +606,10 @@
     'custom log file path');
 
   $p = npg_pipeline::pluggable->new(
-    id_run => 1234,
-    run_folder => q{123456_IL2_1234},
-    runfolder_path => $runfolder_path,
-    timestamp => '02122020',
-    log_file_dir => "$runfolder_path/custom",
-    log_file_name => 'custom.log',
+    runfolder_path => $runfolder_path,
+    timestamp      => '02122020',
+    log_file_dir   => "$runfolder_path/custom",
+    log_file_name  => 'custom.log',
  );
   is ($p->log_file_name, 'custom.log', 'log file name as set');
   is ($p->log_file_dir, "$runfolder_path/custom" , 'log directory as set');
@@ -615,13 +618,11 @@
 
   # setting all three does not make sense, but is not prohibited either
   $p = npg_pipeline::pluggable->new(
-    id_run => 1234,
-    run_folder => q{123456_IL2_1234},
-    runfolder_path => $runfolder_path,
-    timestamp => '02122020',
-    log_file_dir => "$runfolder_path/my_log",
-    log_file_name => 'custom.log',
-    log_file_path => "$runfolder_path/custom/my.log",
+    runfolder_path => $runfolder_path,
+    timestamp      => '02122020',
+    log_file_dir   => "$runfolder_path/my_log",
+    log_file_name  => 'custom.log',
+    log_file_path  => "$runfolder_path/custom/my.log",
  );
   is ($p->log_file_name, 'custom.log', 'log file name as set');
   is ($p->log_file_dir, "$runfolder_path/my_log", 'log directory as set');
@@ -629,11 +630,9 @@
     'custom log file path as directly set');
 
   $p = npg_pipeline::pluggable->new(
-    id_run => 1234,
-    run_folder => q{123456_IL2_1234},
-    runfolder_path => $runfolder_path,
-    timestamp => '02122020',
-    log_file_path => "$runfolder_path/custom/my.log"
+    runfolder_path => $runfolder_path,
+    timestamp      => '02122020',
+    log_file_path  => "$runfolder_path/custom/my.log"
  );
   is ($p->log_file_name, 'my.log', 'log file name is derived from path');
   is ($p->log_file_dir, "$runfolder_path/custom",
@@ -646,13 +645,11 @@ subtest 'Copy log file and product_release config' => sub {
   plan tests => 7;
 
   my $p = npg_pipeline::pluggable->new(
-    id_run => 1234,
-    run_folder => q{123456_IL2_1234},
-    runfolder_path => $runfolder_path,
-    timestamp => '02122020',
-    log_file_dir => $test_dir,
-    log_file_name => 'logfile',
-    product_conf_file_path => $product_config
+    runfolder_path         => $runfolder_path,
+    timestamp              => '02122020',
+    log_file_dir           => $test_dir,
+    log_file_name          => 'logfile',
+    product_conf_file_path => $product_config
  );
   $p->_copy_log_to_analysis_dir();
   my $analysis_path = $p->analysis_path;
@@ -663,24 +660,20 @@
 
   # Set log file path to something false to show error behaviour is fine
   $p = npg_pipeline::pluggable->new(
-    id_run => 1234,
-    run_folder => q{123456_IL2_1234},
-    runfolder_path => '/nope',
-    timestamp => '02122020',
-    log_file_dir => $test_dir,
-    log_file_name => 'logfile',
-    product_conf_file_path => $product_config
+    runfolder_path         => '/nope',
+    timestamp              => '02122020',
+    log_file_dir           => $test_dir,
+    log_file_name          => 'logfile',
+    product_conf_file_path => $product_config
  );
   lives_ok {$p->_copy_log_to_analysis_dir()} 'Log copy to nonexistant runfolder does not die';
 
   $p = npg_pipeline::pluggable->new(
-    id_run => 1234,
-    run_folder => q{123456_IL2_1234},
-    runfolder_path => $runfolder_path,
-    timestamp => '02122020',
-    log_file_dir => '/nuthin',
-    log_file_name => 'logfile',
-    product_conf_file_path => $product_config
+    runfolder_path         => $runfolder_path,
+    timestamp              => '02122020',
+    log_file_dir           => '/nuthin',
+    log_file_name          => 'logfile',
+    product_conf_file_path => $product_config
  );
   lives_ok {$p->_copy_log_to_analysis_dir()}
     'Log copy of nonexistant file does not die';
@@ -698,7 +691,7 @@ subtest 'Check resource population from graph' => sub {
   plan tests => 2;
 
   my $p = npg_pipeline::pluggable->new(
-    id_run => 1234,
+    id_run => $id_run,
     function_order => ['run_analysis_complete'],
     function_list => "$config_dir/function_list_central.json"
   );
@@ -712,7 +705,7 @@ subtest 'Checking resources are assigned correctly from graph' => sub {
   plan tests => 4;
   # Check resources for functions are correctly merged with pipeline-wide settings
   my $p = npg_pipeline::pluggable->new(
-    id_run => 1234,
+    id_run => $id_run,
     function_list => "$config_dir/function_list_central.json"
   );
   my $resources = $p->_function_resource_requirements('update_ml_warehouse_1', 'update_ml_warehouse');
@@ -732,7 +725,7 @@
   );
 
   $p = npg_pipeline::pluggable->new(
-    id_run => 1234,
+    id_run => $id_run,
     function_list => "$config_dir/function_list_post_qc_review.json"
   );
   $resources = $p->_function_resource_requirements('run_run_archived', 'run_run_archived');
@@ -759,5 +752,4 @@
 
   } qr{Function run requires both label/name and id},
     'Missing function name causes resource failure';
-
 };