From 805b988c3219b5d5efbccf456fdbe9463dd622f6 Mon Sep 17 00:00:00 2001 From: Lutz Gross <6036995+LutzGross@users.noreply.github.com> Date: Sun, 10 Mar 2024 17:22:57 +1000 Subject: [PATCH] missing files in Trilinos --- .../commonTools/buildTools/README | 6 + .../buildTools/external/external-configure.pl | 96 +++ .../buildTools/external/makefileSupport.mak | 134 ++++ .../buildTools/generate-makeoptions.pl | 86 +++ .../buildTools/install-package-scripts.sh | 5 + .../buildTools/replace-install-prefix.pl | 89 +++ .../buildTools/strip_dup_incl_paths.pl | 44 ++ .../commonTools/buildTools/strip_dup_libs.pl | 69 ++ .../commonTools/buildTools/template_args.py | 191 +++++ .../commonTools/build_stats/BuildStatsData.py | 81 ++ .../build_stats/BuildStatsGatherTarget.cmake | 90 +++ .../build_stats/BuildStatsSharedVars.cmake | 2 + .../build_stats/BuildStatsWrappers.cmake | 348 +++++++++ .../commonTools/build_stats/CMakeLists.txt | 57 ++ .../build_stats/FindTribitsCiSupportDir.py | 54 ++ .../build_stats/build_stat_wrapper.sh.in | 18 + .../build_stats/cmake/Dependencies.cmake | 1 + .../build_stats/gather_build_stats.py | 316 ++++++++ .../build_stats/get_cwd_for_python.py | 2 + .../remove_all_target_timing_files.sh | 10 + .../build_stats/summarize_build_stats.py | 365 +++++++++ .../build_stats/unit_tests/CMakeLists.txt | 17 + .../target1.timing.bad_type_filesize | 2 + .../target1.timing.empty | 0 .../target1.timing.junk | 3 + .../target1.timing.missing_col_filename | 2 + .../target1.timing.two_data_rows | 3 + .../unit_tests/build_stats.big.small.csv | 22 + .../unit_tests/build_stats.empty.csv | 1 + .../unit_tests/build_stats.incomplete_row.csv | 5 + .../gather_build_stats_UnitTests.py | 361 +++++++++ .../summarize_build_stats_UnitTests.py | 730 ++++++++++++++++++ .../build_stats/wrapper/NMParser.py | 96 +++ .../build_stats/wrapper/Python2and3.py | 36 + .../wrapper/WrapperCommandLineParser.py | 240 ++++++ .../build_stats/wrapper/WrapperOpTimer.py | 227 ++++++ .../build_stats/wrapper/magic_wrapper.py | 88 +++ 37 files changed, 3897 insertions(+) create mode 100644 trilinos_source15/commonTools/buildTools/README create mode 100755 trilinos_source15/commonTools/buildTools/external/external-configure.pl create mode 100644 trilinos_source15/commonTools/buildTools/external/makefileSupport.mak create mode 100755 trilinos_source15/commonTools/buildTools/generate-makeoptions.pl create mode 100755 trilinos_source15/commonTools/buildTools/install-package-scripts.sh create mode 100755 trilinos_source15/commonTools/buildTools/replace-install-prefix.pl create mode 100755 trilinos_source15/commonTools/buildTools/strip_dup_incl_paths.pl create mode 100755 trilinos_source15/commonTools/buildTools/strip_dup_libs.pl create mode 100755 trilinos_source15/commonTools/buildTools/template_args.py create mode 100644 trilinos_source15/commonTools/build_stats/BuildStatsData.py create mode 100644 trilinos_source15/commonTools/build_stats/BuildStatsGatherTarget.cmake create mode 100644 trilinos_source15/commonTools/build_stats/BuildStatsSharedVars.cmake create mode 100644 trilinos_source15/commonTools/build_stats/BuildStatsWrappers.cmake create mode 100644 trilinos_source15/commonTools/build_stats/CMakeLists.txt create mode 100644 trilinos_source15/commonTools/build_stats/FindTribitsCiSupportDir.py create mode 100755 trilinos_source15/commonTools/build_stats/build_stat_wrapper.sh.in create mode 100644 trilinos_source15/commonTools/build_stats/cmake/Dependencies.cmake create mode 100755 
trilinos_source15/commonTools/build_stats/gather_build_stats.py
 create mode 100644 trilinos_source15/commonTools/build_stats/get_cwd_for_python.py
 create mode 100755 trilinos_source15/commonTools/build_stats/remove_all_target_timing_files.sh
 create mode 100755 trilinos_source15/commonTools/build_stats/summarize_build_stats.py
 create mode 100644 trilinos_source15/commonTools/build_stats/unit_tests/CMakeLists.txt
 create mode 100755 trilinos_source15/commonTools/build_stats/unit_tests/bad_timing_build_stats_files/target1.timing.bad_type_filesize
 create mode 100755 trilinos_source15/commonTools/build_stats/unit_tests/bad_timing_build_stats_files/target1.timing.empty
 create mode 100755 trilinos_source15/commonTools/build_stats/unit_tests/bad_timing_build_stats_files/target1.timing.junk
 create mode 100755 trilinos_source15/commonTools/build_stats/unit_tests/bad_timing_build_stats_files/target1.timing.missing_col_filename
 create mode 100755 trilinos_source15/commonTools/build_stats/unit_tests/bad_timing_build_stats_files/target1.timing.two_data_rows
 create mode 100755 trilinos_source15/commonTools/build_stats/unit_tests/build_stats.big.small.csv
 create mode 100644 trilinos_source15/commonTools/build_stats/unit_tests/build_stats.empty.csv
 create mode 100644 trilinos_source15/commonTools/build_stats/unit_tests/build_stats.incomplete_row.csv
 create mode 100644 trilinos_source15/commonTools/build_stats/unit_tests/gather_build_stats_UnitTests.py
 create mode 100644 trilinos_source15/commonTools/build_stats/unit_tests/summarize_build_stats_UnitTests.py
 create mode 100644 trilinos_source15/commonTools/build_stats/wrapper/NMParser.py
 create mode 100644 trilinos_source15/commonTools/build_stats/wrapper/Python2and3.py
 create mode 100644 trilinos_source15/commonTools/build_stats/wrapper/WrapperCommandLineParser.py
 create mode 100644 trilinos_source15/commonTools/build_stats/wrapper/WrapperOpTimer.py
 create mode 100755 trilinos_source15/commonTools/build_stats/wrapper/magic_wrapper.py

diff --git a/trilinos_source15/commonTools/buildTools/README b/trilinos_source15/commonTools/buildTools/README
new file mode 100644
index 0000000000..18f00d5c0b
--- /dev/null
+++ b/trilinos_source15/commonTools/buildTools/README
@@ -0,0 +1,6 @@
+The scripts in this directory are for the old autotools build system for
+Trilinos; they are only used by a few Trilinos packages that are built in
+contexts outside of Trilinos.
+
+The build tools for the new Trilinos CMake system are in the base-level
+directory cmake/.
diff --git a/trilinos_source15/commonTools/buildTools/external/external-configure.pl b/trilinos_source15/commonTools/buildTools/external/external-configure.pl
new file mode 100755
index 0000000000..47e872e242
--- /dev/null
+++ b/trilinos_source15/commonTools/buildTools/external/external-configure.pl
@@ -0,0 +1,96 @@
+#!/usr/bin/perl -w
+#
+# This perl script is used to set up a build directory for
+# code that is external to Trilinos but uses the hard work
+# of the Trilinos autoconf/automake system to define how
+# code is compiled, packaged into libraries and linked
+# into executables.
+#
+# Note, this script must be maintained to be current for
+# the Teuchos makefile.
+#
+use strict;
+use Cwd;
+use File::Path;
+#
+# Parse the command-line
+#
+if( !defined(@ARGV) || scalar(@ARGV) < 3 ) {
+  die
+    "Usage: external-configure.pl TRILINOS_BUILD_DIR EXTERNAL_SRC_DIR PACKAGE1 PACKAGE2 ...\n".
+    "  TRILINOS_BUILD_DIR : The path to the base Trilinos build directory\n".
+    "  EXTERNAL_SRC_DIR : The path to the base directory for PACKAGE1, PACKAGE2 ...\n"
+    ;
+}
+my $trilinos_build_dir = make_abs_path(cwd(),shift);
+my $external_src_dir = make_abs_path(cwd(),shift);
+my @external_packages = @ARGV;
+# ToDo: Validate that $trilinos_build_dir is an absolute path!
+print " trilinos_build_dir = $trilinos_build_dir\n";
+print " external_src_dir = $external_src_dir\n";
+print " external_packages = [", join(",",@external_packages), "]\n";
+#
+# Get the source directory for Trilinos
+#
+my $srcdir_line = `grep \'^srcdir.*=\' $trilinos_build_dir/Makefile`;
+#print "srcdir_line: $srcdir_line";
+my $trilinos_src_dir = make_abs_path($trilinos_build_dir,(split(" = ",$srcdir_line))[1]);
+chomp $trilinos_src_dir;
+# ToDo: If above is a relative path then we must append it to
+print " trilinos_src_dir = $trilinos_src_dir\n";
+#
+# Create a base-level Makefile that will build the other projects
+#
+open BASE_MAKEFILE, ">Makefile";
+print BASE_MAKEFILE ":\n";
+foreach(@external_packages) {
+  print BASE_MAKEFILE "\tcd ${_}; make\n";
+}
+print BASE_MAKEFILE "all :\n";
+foreach(@external_packages) {
+  print BASE_MAKEFILE "\tcd ${_}; make all\n";
+}
+print BASE_MAKEFILE "clean :\n";
+foreach(@external_packages) {
+  print BASE_MAKEFILE "\tcd ${_}; make clean\n";
+}
+print BASE_MAKEFILE "clean-obj :\n";
+foreach(@external_packages) {
+  print BASE_MAKEFILE "\tcd ${_}; make clean-obj\n";
+}
+print BASE_MAKEFILE "clean-lib :\n";
+foreach(@external_packages) {
+  print BASE_MAKEFILE "\tcd ${_}; make clean-lib\n";
+}
+print BASE_MAKEFILE "clean-exe :\n";
+foreach(@external_packages) {
+  print BASE_MAKEFILE "\tcd ${_}; make clean-exe\n";
+}
+#
+# Create the external subpackage directories and their makefiles
+#
+foreach(@external_packages) {
+  my $external_package = $_;
+  print " Setting up \"$external_package\" ...\n";
+  (mkpath($external_package,1,0777) || die $!) if !(-e $external_package);
+  run_cmnd("cp ${external_src_dir}/${external_package}/Makefile.in ${external_package}/Makefile",1);
+  run_cmnd("${trilinos_src_dir}/commonTools/refactoring/token-replace.pl _TRILINOS_BUILD_DIR ${trilinos_build_dir} ${external_package}/Makefile ${external_package}/Makefile",1);
+  run_cmnd("${trilinos_src_dir}/commonTools/refactoring/token-replace.pl _TRILINOS_SRC_DIR ${trilinos_src_dir} ${external_package}/Makefile ${external_package}/Makefile",1);
+  run_cmnd("${trilinos_src_dir}/commonTools/refactoring/token-replace.pl _BASE_SRC_DIR ${external_src_dir}/${external_package} ${external_package}/Makefile ${external_package}/Makefile",1);
+}
+
+sub run_cmnd {
+  my $cmnd = shift;
+  my $stop_on_fail = shift;
+  #print "$cmnd\n";
+  if(system($cmnd) != 0 && $stop_on_fail) { die; }
+}
+
+sub make_abs_path {
+  my $base_path = shift;
+  my $path_in = shift;
+  return "$base_path/$path_in" if( $path_in =~ /^\./ );
+  return $path_in;
+}
diff --git a/trilinos_source15/commonTools/buildTools/external/makefileSupport.mak b/trilinos_source15/commonTools/buildTools/external/makefileSupport.mak
new file mode 100644
index 0000000000..11b8849e0e
--- /dev/null
+++ b/trilinos_source15/commonTools/buildTools/external/makefileSupport.mak
@@ -0,0 +1,134 @@
+#
+# Build a makefile stub that contains Trilinos autoconf-generated
+# macros for compiling and building.  We will grab these from
+# the tools package Teuchos.
+#
+
+TRILINOS_TEUCHOS_BUILD = $(TRILINOS_BUILD_DIR)/packages/teuchos
+TRILINOS_MAKE_OPTIONS_FILE = ./trilinos_make_options.mak
+TRILINOS_TEUCHOS_MAKEFILE = $(TRILINOS_TEUCHOS_BUILD)/src/Makefile
+TRILINOS_TEUCHOS_EXPORT_MAKEFILE = $(TRILINOS_TEUCHOS_BUILD)/Makefile.export.teuchos
+
+# Make rule for this file
+$(TRILINOS_MAKE_OPTIONS_FILE) : $(TRILINOS_TEUCHOS_MAKEFILE)
+	$(TRILINOS_SRC_DIR)/packages/teuchos/config/generate-makeoptions.pl $(TRILINOS_TEUCHOS_BUILD)/src/Makefile TEUCHOS > $(TRILINOS_MAKE_OPTIONS_FILE)
+
+#
+# Read in the Trilinos autoconf-generated macros from the file created above
+#
+
+include $(TRILINOS_MAKE_OPTIONS_FILE)
+include $(TRILINOS_TEUCHOS_EXPORT_MAKEFILE) # Note, the order is important since TEUCHOS_LIBS is redefined here!
+
+#
+# File extensions
+#
+
+EXTERNAL_LIB_EXT = a
+
+EXTERNAL_OBJ_EXT = o
+
+# Set makefile buildsystem macros
+#
+
+# EXTERNAL_C compiler
+EXTERNAL_C = $(TEUCHOS_CC)
+EXTERNAL_C_DEP_OPT = -MM
+EXTERNAL_C_COMPILE_OPT = -c
+EXTERNAL_C_OUTPUT_OPT = -o \
+
+
+# EXTERNAL_C++ compiler
+EXTERNAL_CXX = $(TEUCHOS_CXX)
+EXTERNAL_CXX_DEP_OPT = -MM
+EXTERNAL_CXX_COMPILE_OPT = -c
+EXTERNAL_CXX_OUTPUT_OPT = -o \
+
+# Fortran compiler
+EXTERNAL_F77 = $(TEUCHOS_F77)
+EXTERNAL_F77_COMPILE_OPT = -c
+EXTERNAL_F77_OUTPUT_OPT = -o \
+
+# Library creator
+EXTERNAL_AR = $(TEUCHOS_libteuchos_a_AR) \
+
+EXTERNAL_RANLIB = $(TEUCHOS_RANLIB) \
+
+# Linker
+EXTERNAL_LD = $(TEUCHOS_CXXLD)
+
+# Install directory (taken from Trilinos' --prefix=??? option)
+ifneq ($(TEUCHOS_prefix),)
+  EXTERNAL_INSTALL_DIR = $(TEUCHOS_prefix)
+endif
+
+#
+# Program options
+#
+
+# Preprocessor macro definitions
+EXTERNAL_DEFINES += -D_MIN=min -D_MAX=max $(TEUCHOS_DEFS) $(TEUCHOS_CPPFLAGS)
+
+# Include directories
+EXTERNAL_INCLUDES += $(EXTERNAL_INCL_DIR)
+
+#EXTERNAL_CPPFLAGS += -v
+
+# Linker Options
+EXTERNAL_LDFLAGS = $(TEUCHOS_LDFLAGS) $(TEUCHOS_LIBS)
+
+# EXTERNAL_C, EXTERNAL_C++ and Fortran compiler options
+
+EXTERNAL_CFLAGS = $(TEUCHOS_CFLAGS)
+EXTERNAL_CXXFLAGS = $(TEUCHOS_CXXFLAGS)
+EXTERNAL_F77FLAGS = $(TEUCHOS_FFLAGS)
+
+#
+# Build Rules
+#
+
+# Build object files from EXTERNAL_C source files
+%.$(EXTERNAL_OBJ_EXT) : %.c $(TRILINOS_MAKE_OPTIONS_FILE)
+	$(EXTERNAL_C) $(EXTERNAL_C_COMPILE_OPT) $(EXTERNAL_CPPFLAGS) $(EXTERNAL_EXTRA_CPPFLAGS) \
+	$(EXTERNAL_DEFINES) $(EXTERNAL_INCLUDES) \
+	$(EXTERNAL_CFLAGS) $(EXTERNAL_EXTRA_CFLAGS) $(EXTERNAL_C_OUTPUT_OPT)$@ $<
+
+# Build object files from EXTERNAL_C++ source files
+%.$(EXTERNAL_OBJ_EXT) : %.cpp $(TRILINOS_MAKE_OPTIONS_FILE)
+	$(EXTERNAL_CXX) $(EXTERNAL_CXX_COMPILE_OPT) $(EXTERNAL_CPPFLAGS) $(EXTERNAL_EXTRA_CPPFLAGS) \
+	$(EXTERNAL_DEFINES) $(EXTERNAL_INCLUDES) \
+	$(EXTERNAL_CXXFLAGS) $(EXTERNAL_EXTRA_CXXFLAGS) $(EXTERNAL_CXX_OUTPUT_OPT)$@ $<
+
+# Build object files from Fortran source files
+%.$(EXTERNAL_OBJ_EXT) : %.f $(TRILINOS_MAKE_OPTIONS_FILE)
+	$(EXTERNAL_F77) $(EXTERNAL_F77_COMPILE_OPT) $(EXTERNAL_F77FLAGS) $(EXTERNAL_EXTRA_F77FLAGS) $(EXTERNAL_F77_OUTPUT_OPT)$@ $<
+#	$(EXTERNAL_F77) $(EXTERNAL_F77_COMPILE_OPT) $(EXTERNAL_CPPFLAGS) $(EXTERNAL_EXTRA_CPPFLAGS) $(EXTERNAL_F77FLAGS) $(EXTERNAL_EXTRA_F77FLAGS) $(EXTERNAL_F77_OUTPUT_OPT)$@ $<
+
+# Build dependency files for EXTERNAL_C source files that include header dependencies
+%.d: %.c $(TRILINOS_MAKE_OPTIONS_FILE)
+	$(EXTERNAL_C) $(EXTERNAL_C_DEP_OPT) $(EXTERNAL_CPPFLAGS) $(EXTERNAL_EXTRA_CPPFLAGS) \
+	$(EXTERNAL_DEFINES) $(EXTERNAL_INCLUDES) \
+	$< \
+	| sed 's/$(@:.d=\.$(EXTERNAL_OBJ_EXT))/$(@:.d=.$(EXTERNAL_OBJ_EXT)) $@/' | $(EXTERNAL_DEP_POST_PROC) > $@; [ -s $@ ] || rm -f $@
+#	| $(EXTERNAL_BASE_DIR)/Moocho/build/dep_post.pl $@ $(EXTERNAL_OBJ_EXT) | $(EXTERNAL_DEP_POST_PROC) > $@; [ -s $@ ] || rm -f $@
+
+# Build dependency files for EXTERNAL_C++ source files that include header dependencies
+%.d: %.cpp $(TRILINOS_MAKE_OPTIONS_FILE)
+	$(EXTERNAL_CXX) $(EXTERNAL_CXX_DEP_OPT) $(EXTERNAL_CPPFLAGS) $(EXTERNAL_EXTRA_CPPFLAGS) \
+	$(EXTERNAL_DEFINES) $(EXTERNAL_INCLUDES) \
+	$< \
+	| sed 's/$(@:.d=\.$(EXTERNAL_OBJ_EXT))/$(@:.d=.$(EXTERNAL_OBJ_EXT)) $@/' | $(EXTERNAL_DEP_POST_PROC) > $@; [ -s $@ ] || rm -f $@
+#	| $(EXTERNAL_BASE_DIR)/Moocho/build/dep_post.pl $@ $(EXTERNAL_OBJ_EXT) | $(EXTERNAL_DEP_POST_PROC) > $@; [ -s $@ ] || rm -f $@
+
+#
+# Universal targets
+#
+
+clean-obj :
+	rm *.o
+clean-lib :
+	rm *.a
+clean-exe :
+	rm *.exe
+clean : clean-obj clean-lib clean-exe
diff --git a/trilinos_source15/commonTools/buildTools/generate-makeoptions.pl b/trilinos_source15/commonTools/buildTools/generate-makeoptions.pl
new file mode 100755
index 0000000000..a39223efac
--- /dev/null
+++ b/trilinos_source15/commonTools/buildTools/generate-makeoptions.pl
@@ -0,0 +1,86 @@
+#!/usr/bin/perl -w
+#
+# This perl script grabs a bunch of make macro definitions
+# generated for Teuchos that can be used in other makefiles.
+# This is dumped to stdout and can be redirected to build
+# a makefile.
+#
+# Note, this script must be maintained to be current for
+# the Teuchos makefile.
+#
+use strict;
+
+if( !(defined(@ARGV) && scalar(@ARGV)==2) ) {
+  die "Error, this script takes two and only two arguments (makefile_name package_name)!\n";
+}
+
+my $makefile_name = shift;
+my $package_name = shift;
+
+#
+# List the macros you want to grep and include in the output
+#
+my @macros =
+  (
+   "CC"
+   ,"CXX"
+   ,"F77"
+   ,"CXXLD"
+   ,"DEFS"
+   ,"CPPFLAGS"
+   ,"CFLAGS"
+   ,"CXXFLAGS"
+   ,"FFLAGS"
+   ,"LDFLAGS"
+   ,"FLIBS"
+   ,"BLAS_LIBS"
+   ,"LAPACK_LIBS"
+   ,"prefix"
+   ,"AR"
+   ,"ALTERNATE_AR"
+   ,"libteuchos_a_AR"
+   ,"RANLIB"
+   );
+
+open(FILE_IN, "<$makefile_name") || die "The file $makefile_name could not be opened for input\n";
+my @makefile_name_array = <FILE_IN>;
+close FILE_IN;
+
+#
+# Find the above macros and append "${package_name}_" to the beginning.
+#
+my @new_macros;
+my $add_next_line = 0;
+foreach( @makefile_name_array ) {
+  my $line = $_;
+  if($add_next_line) {
+    push @new_macros, $line;
+    if( substr($line,-1,1) eq "\\" ) {
+      $add_next_line = 1;
+    }
+    else {
+      $add_next_line = 0;
+    }
+    next;
+  }
+  #print "Line = $line";
+  foreach( @macros ) {
+    my $macro_search = "^${_} ";
+    #print "Macro search = \'$macro_search\'\n";
+    if( $line=~/$macro_search/ ) {
+      #print "Adding Macro!\n";
+      my $find_str = '\(CXX\)';
+      my $replace_str = "(${package_name}_CXX)";
+      $line=~s/$find_str/$replace_str/;
+      push @new_macros, "${package_name}_${line}";
+      if( substr($line,-2,1) eq "\\" ) {
+        $add_next_line = 1;
+      }
+      else {
+        $add_next_line = 0;
+      }
+    }
+  }
+}
+
+print join("",@new_macros);
diff --git a/trilinos_source15/commonTools/buildTools/install-package-scripts.sh b/trilinos_source15/commonTools/buildTools/install-package-scripts.sh
new file mode 100755
index 0000000000..8157c6b232
--- /dev/null
+++ b/trilinos_source15/commonTools/buildTools/install-package-scripts.sh
@@ -0,0 +1,5 @@
+#!/bin/sh
+# Call this script from the 'bootstrap' script of each package
+_COMMON_TOOL_BASE_DIR=$1;
+cp $_COMMON_TOOL_BASE_DIR/buildTools/*.pl ./config/.
+cp $_COMMON_TOOL_BASE_DIR/refactoring/string-replace.pl ./config/.
diff --git a/trilinos_source15/commonTools/buildTools/replace-install-prefix.pl b/trilinos_source15/commonTools/buildTools/replace-install-prefix.pl
new file mode 100755
index 0000000000..7523b08994
--- /dev/null
+++ b/trilinos_source15/commonTools/buildTools/replace-install-prefix.pl
@@ -0,0 +1,89 @@
+#!/usr/bin/perl -w
+use strict;
+use Getopt::Long;
+#
+# This script is called to do a set of text replacements for installing
+# a Makefile.export.package file so that external clients can use it.
+#
+# Read in command-line arguments
+#
+my $exec_prefix = "";            # [required] Abs path to base installation directory (i.e. --prefix=??? option passed to configure)
+my $my_export_makefile = "";     # [required] Name only of installed Makefile.export.package file
+my $my_top_srcdir = "";          # [required] Abs path to this package's top source directory
+my $my_incl_dirs = "";           # [required] Abs path to this package's include directories
+my $my_lib_dirs = "";            # [optional] Abs path to this package's library directories (if any exist)
+my $dep_package_builddirs = "";  # [optional] Abs paths to other directly dependent framework package build directories (if any exist)
+GetOptions(
+  "exec-prefix=s" => \$exec_prefix,
+  "my-export-makefile=s" => \$my_export_makefile,
+  "my-abs-top-srcdir=s" => \$my_top_srcdir,
+  "my-abs-incl-dirs=s" => \$my_incl_dirs,
+  "my-abs-lib-dirs=s" => \$my_lib_dirs,
+  "dep-package-abs-builddirs=s" => \$dep_package_builddirs
+  );
+#
+# Validate command-line arguments
+#
+scalar(@ARGV) == 0 || die;
+$exec_prefix ne "" || die;
+$my_export_makefile ne "" || die;
+$my_top_srcdir ne "" || die;
+$my_incl_dirs ne "" || die;
+#
+# Interpret command-line arguments
+#
+$exec_prefix = remove_rel_paths($exec_prefix);
+my @my_incl_dirs = split(":",$my_incl_dirs);
+my @my_lib_dirs = split(":",$my_lib_dirs);
+my @dep_export_package_builddirs = split(":",$dep_package_builddirs);
+#
+# Do the replacements
+#
+my $my_abs_export_makefile = "${exec_prefix}/include/${my_export_makefile}";
+
+my $cmnd_base = "${my_top_srcdir}/config/token-replace.pl ";
+#
+foreach(@dep_export_package_builddirs) {
+  if($_ ne "") {
+    run_cmnd($cmnd_base . "${_} ${exec_prefix}/include ${my_abs_export_makefile} ${my_abs_export_makefile}");
+  }
+}
+#
+foreach(@my_incl_dirs) {
+  if($_ ne "") {
+    run_cmnd($cmnd_base . "-I${_} -I${exec_prefix}/include ${my_abs_export_makefile} ${my_abs_export_makefile}");
+  }
+}
+#
+foreach(@my_lib_dirs) {
+  if($_ ne "") {
+    run_cmnd($cmnd_base . "-L${_} -L${exec_prefix}/lib ${my_abs_export_makefile} ${my_abs_export_makefile}");
+  }
+}
+#
+run_cmnd($cmnd_base .
"${my_top_srcdir}/config ${exec_prefix}/include ${my_abs_export_makefile} ${my_abs_export_makefile}"); +# +# Subroutines +# +sub remove_rel_paths { + my $entry_in = shift; + if ($entry_in=~/-L\.\./) { + return $entry_in; + } + my @paths = split("/",$entry_in); + my @new_paths; + foreach( @paths ) { + if( !($_=~/\.\./) ) { + push @new_paths, $_; + } + else { + pop @new_paths + } + } + return join("/",@new_paths); +} +sub run_cmnd { + my $cmnd = shift; + #print "\n", $cmnd, "\n"; + system($cmnd)==0 || die; +} diff --git a/trilinos_source15/commonTools/buildTools/strip_dup_incl_paths.pl b/trilinos_source15/commonTools/buildTools/strip_dup_incl_paths.pl new file mode 100755 index 0000000000..c628d31159 --- /dev/null +++ b/trilinos_source15/commonTools/buildTools/strip_dup_incl_paths.pl @@ -0,0 +1,44 @@ +#!/usr/bin/perl -w +# This perl script removes duplicate include paths left to the right +use strict; +my @all_incl_paths = @ARGV; +my @cleaned_up_incl_paths; +foreach( @all_incl_paths ) { + $_ = remove_rel_paths($_); + if( !($_=~/-I/) ) { + push @cleaned_up_incl_paths, $_; + } + elsif( !entry_exists($_,\@cleaned_up_incl_paths) ) { + push @cleaned_up_incl_paths, $_; + } +} +print join( " ", @cleaned_up_incl_paths ); +# +# Subroutines +# +sub entry_exists { + my $entry = shift; # String + my $list = shift; # Reference to an array + foreach( @$list ) { + if( $entry eq $_ ) { return 1; } + } + return 0; +} +# +sub remove_rel_paths { + my $entry_in = shift; + if ($entry_in=~/-I\.\./) { + return $entry_in; + } + my @paths = split("/",$entry_in); + my @new_paths; + foreach( @paths ) { + if( !($_=~/\.\./) ) { + push @new_paths, $_; + } + else { + pop @new_paths + } + } + return join("/",@new_paths); +} diff --git a/trilinos_source15/commonTools/buildTools/strip_dup_libs.pl b/trilinos_source15/commonTools/buildTools/strip_dup_libs.pl new file mode 100755 index 0000000000..cdf4b42a90 --- /dev/null +++ b/trilinos_source15/commonTools/buildTools/strip_dup_libs.pl @@ -0,0 +1,69 @@ +#!/usr/bin/perl -w +# This perl script removes duplicate libraries from the right to the left and +# removes duplicate -L library paths from the left to the right +use strict; + +my @all_libs = @ARGV; +# +# Move from left to right and remove duplicate -l libraries +# +my @cleaned_up_libs_first; +foreach( reverse @all_libs ) { + $_ = remove_rel_paths($_); + if( $_=~/-L/ ) { + unshift @cleaned_up_libs_first, $_; + } + else { + if( !entry_exists($_,\@cleaned_up_libs_first) ) { + unshift @cleaned_up_libs_first, $_; + } + } +} + +# +# Move from right to left and remove duplicate -L library paths +# +my @cleaned_up_libs; +foreach( @cleaned_up_libs_first ) { + $_ = remove_rel_paths($_); + if( !($_=~/-L/) ) { + push @cleaned_up_libs, $_; + } + elsif( !entry_exists($_,\@cleaned_up_libs) ) { + push @cleaned_up_libs, $_; + } +} +# +# Print the new list of libraries and paths +# +print join( " ", @cleaned_up_libs ); + +# +# Subroutines +# +sub entry_exists { + my $entry = shift; # String + my $list = shift; # Reference to an array + foreach( @$list ) { + if( $entry eq $_ ) { return 1; } + } + return 0; +} +# +sub remove_rel_paths { + my $entry_in = shift; + if ($entry_in=~/-L\.\./) { + return $entry_in; + } + my @paths = split("/",$entry_in); + my @new_paths; + foreach( @paths ) { + if( !($_=~/\.\./) ) { + push @new_paths, $_; + } + else { + pop @new_paths + } + } + return join("/",@new_paths); +} diff --git a/trilinos_source15/commonTools/buildTools/template_args.py b/trilinos_source15/commonTools/buildTools/template_args.py new file mode 
100755 index 0000000000..496bab5047 --- /dev/null +++ b/trilinos_source15/commonTools/buildTools/template_args.py @@ -0,0 +1,191 @@ +#! /usr/bin/env python +# -*- python -*- +""" +template_args.py - A python script for parsing C++ code and listing all unique + arguments for a given C++ template. + +""" +__version__ = "1.0" +__author__ = "Bill Spotz" +__date__ = "Oct 31 2006" + +# System imports +from optparse import * +import os +import re +import sys + +################################################################################ + +def removeComments(text): + """ + Return a copy of text with its C-style and C++-style comments removed. + """ + cCommentRE = re.compile(r"\/\*.*?\*\/", re.DOTALL ) + cppCommentRE = re.compile(r"\/\/.*$", re.MULTILINE) + + for commentRE in (cCommentRE, cppCommentRE): + match = True + while match: + match = commentRE.search(text) + if match: + if match.end() > len(text): + text = text[:match.start()] + else: + text = text[:match.start()] + text[match.end():] + return text + +################################################################################ + +def findBlock(text, pos=0): + """ + Given the input text (potentially multiline) and an optional pos marking the + starting position, find an opening delimeter -- either (, [, {, <, single + quote, or double quote -- and return a tuple of integers indicating the + character indexes of the text block -- closed with ), ], }, >, single quote, + or double quote, respectively -- while correctly handling nested blocks. + """ + + # Define delimeter strings + quote1Delimeter = "'" + quote2Delimeter = '"' + openDelimeters = "\(\[\{<" + closeDelimeters = "\)\]\}>" + + # Define delimeter regular expressions + quote1RE = re.compile("([" + quote1Delimeter + "])", re.M) + quote2RE = re.compile("([" + quote2Delimeter + "])", re.M) + openRE = re.compile("([" + openDelimeters + + quote1Delimeter + + quote2Delimeter + "])", re.M) + anyRE = re.compile("([" + openDelimeters + + quote1Delimeter + + quote2Delimeter + + closeDelimeters + "])", re.M) + + # Find the first opening delimeter + matchObject = openRE.search(text, pos) + if not matchObject: return (None, None) + + # Initialize the loop + stack = [ matchObject.group() ] + start = matchObject.start() + pos = start + 1 + + # Find the end of the block + while matchObject: + + # Determine the active delimeter regular expression + if stack[-1] == quote1Delimeter: + activeRE = quote1RE + elif stack[-1] == quote2Delimeter: + activeRE = quote2RE + else: + activeRE = anyRE + + # Search for the next delimeter + matchObject = activeRE.search(text, pos) + if matchObject: + delimeter = matchObject.group() + pos = matchObject.end() + + # Check for matched delimeters + if (((stack[-1] == quote1Delimeter) and + (delimeter == quote1Delimeter)) or + ((stack[-1] == quote2Delimeter) and + (delimeter == quote2Delimeter)) or + ((stack[-1] == "(" ) and + (delimeter == ")" )) or + ((stack[-1] == "[" ) and + (delimeter == "]" )) or + ((stack[-1] == "{" ) and + (delimeter == "}" )) or + ((stack[-1] == "<" ) and + (delimeter == ">" ))): + stack.pop() # Remove the last element from the list + if len(stack) == 0: + return (start, pos) + + # Process unmatched delimeter + else: + if (delimeter in openDelimeters or + delimeter == quote1Delimeter or + delimeter == quote2Delimeter ): + stack.append(delimeter) # Add the delimeter to the stack + else: + raise RuntimeError, "findBlock: mismatched delimeters: " + \ + stack[-1] + " " + delimeter + + # We made it through all of text without finding the end of 
the block + raise RuntimeError, "findBlock: open block: " + join(stack) + +################################################################################ + +def main(): + + # Set up the command-line parser object + prog = os.path.split(__file__)[1] + usage = __doc__ + "usage: " + prog + " [options] template file1 ..." + version = prog + " " + __version__ + " " + __date__ + parser = OptionParser(usage=usage,version=version) + parser.set_defaults(verbose=False) + parser.add_option("-v", "--verbose", action="store_true" , dest="verbose", + help="run in verbose mode") + parser.add_option("-q", "--quiet" , action="store_false", dest="verbose", + help="run in quiet mode [default]") + + # Get the options and arguments from the command line + (options,args) = parser.parse_args() + + # Check the arguments + if len(args) < 2: + print usage + sys.exit(-1) + template = args[0] + filenames = args[1:] + + # Generate the regular expression + templateRE = re.compile(template + "<") + + # Initialize the list of template arguments + template_args = [ ] + + # Loop over the filenames + for filename in filenames: + if options.verbose: + print "Processing '%s' ..." % (filename,), + try: + text = open(filename,"r").read() + except IOError: + print + print "Could not open '%s'." % filename + sys.exit(-2) + + # Remove the comments from the text + text = removeComments(text) + + # Loop throught the text, searching for our template + pos = 0 + match = True + while match: + match = templateRE.search(text,pos) + if match: + (start,end) = findBlock(text, match.start()) + arg = text[start+1:end-1].strip() + if not arg in template_args: + template_args.append(arg) + pos = end + 1 + if options.verbose: + print "ok" + + # Process the template arguments + template_args.sort() + if options.verbose: + print "\nTemplate arguments for %s< ... >:" % template + for arg in template_args: + print arg + +################################################################################ + +if __name__ == "__main__": + main() diff --git a/trilinos_source15/commonTools/build_stats/BuildStatsData.py b/trilinos_source15/commonTools/build_stats/BuildStatsData.py new file mode 100644 index 0000000000..51ba1467ff --- /dev/null +++ b/trilinos_source15/commonTools/build_stats/BuildStatsData.py @@ -0,0 +1,81 @@ +from FindTribitsCiSupportDir import * +import GeneralScriptSupport as GSS + + +# Standard set of build stats fields we want to read in +# +def getStdBuildStatsColsAndTypesList(): + return [ + ColNameAndType('max_resident_size_Kb', 'float'), + ColNameAndType('elapsed_real_time_sec', 'float'), + ColNameAndType('FileName', 'string'), + ColNameAndType('FileSize', 'float'), + ] +# NOTE: Above, we use type 'float' instead of 'int' for fields that are ints +# because we want to allow a very large size. 
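+#
+# For illustration, a small usage sketch of the column spec above (not part
+# of the build flow): with the 'float' type,
+#
+#   ColNameAndType('FileSize', 'float').convertFromStr("4096")  # -> 4096.0
+#
+# while a 'string' column returns its input unchanged.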
+ + +def getColNameTypeIdxListGivenColNameAndTypeList(csvFileName, columnHeadersList, + colNameAndTypesToGetList, + ): + colNameTypeIdxList = [] + for colNameAndTypeToGet in colNameAndTypesToGetList: + colIdx = GSS.findInSequence(columnHeadersList, colNameAndTypeToGet.colName()) + if colIdx != -1: + colNameTypeIdxList.append(ColNameTypeIdx(colNameAndTypeToGet, colIdx)) + else: + raise Exception( + "Error, the CSV file column header '"+colNameAndTypeToGet.colName()+"'"+\ + " does not exist in the list of column headers "+str(columnHeadersList)+\ + " from the CSV file '"+csvFileName+"'!") + return colNameTypeIdxList + + +class ColNameAndType(object): + def __init__(self, colName, colType): + self.__colName = colName + self.__colType = colType + self.assertType() + def colName(self): + return self.__colName + def colType(self): + return self.__colType + def __repr__(self): + myStr = "ColNameAndType{"+self.__colName+","+str(self.__colType)+"}" + return myStr + def convertFromStr(self, strIn): + if self.__colType == "string": + return strIn + elif self.__colType == "int": + return int(strIn) + elif self.__colType == "float": + return float(strIn) + def assertType(self): + supportedTypes = [ "string", "int", "float" ] + if -1 == GSS.findInSequence(supportedTypes, self.__colType): + raise Exception( + "Error, type '"+str(self.__colType)+"' is not supported! Supported types"+\ + " include "+str(supportedTypes)+"!") + def __eq__(self, other): + return((self.__colName,self.__colType)==(other.__colName,other.__colType)) + def __ne__(self, other): + return((self.__colName,self.__colType)!=(other.__colName,other.__colType)) + + +class ColNameTypeIdx(object): + def __init__(self, colNameAndType, colIdx): + self.__colNameAndType = colNameAndType + self.__colIdx = colIdx + def colName(self): + return self.__colNameAndType.colName() + def getIdx(self): + return self.__colIdx + def convertFromStr(self, strIn): + return self.__colNameAndType.convertFromStr(strIn) + def __repr__(self): + myStr = "ColNameTypeIdx{"+str(self.__colNameAndType)+","+str(self.__colIdx)+"}" + return myStr + def __eq__(self, other): + return ((self.__colNameAndType,self.__colIdx)==(other.__colNameAndType,other.__colIdx)) + def __ne__(self, other): + return ((self.__colNameAndType,self.__colIdx)!=(other.__colNameAndType,other.__colIdx)) diff --git a/trilinos_source15/commonTools/build_stats/BuildStatsGatherTarget.cmake b/trilinos_source15/commonTools/build_stats/BuildStatsGatherTarget.cmake new file mode 100644 index 0000000000..c2070abefe --- /dev/null +++ b/trilinos_source15/commonTools/build_stats/BuildStatsGatherTarget.cmake @@ -0,0 +1,90 @@ +################################################################################ +# +# Add target for gathering up build stats +# +################################################################################ + + +include("${CMAKE_CURRENT_LIST_DIR}/BuildStatsSharedVars.cmake") + + +# Create custom 'gather-build-stats' target that will run last +# +# NOTE: This function must be called at the very end of all of the build +# targets that get created for a project! 
+# +function(add_target_gather_build_stats) + + if (${PROJECT_NAME}_ENABLE_BUILD_STATS) + + add_custom_command( + OUTPUT "${BUILD_STATS_CSV_FILE}" + COMMAND "${BUILD_STATS_SRC_DIR}/gather_build_stats.py" + WORKING_DIRECTORY "${${PROJECT_NAME}_BINARY_DIR}" ) + + add_custom_target(gather-build-stats ALL + DEPENDS "${BUILD_STATS_CSV_FILE}") + + get_all_build_targets_including_in_subdirs("${${PROJECT_NAME}_SOURCE_DIR}" + projectBuildTargetsList) + + if (projectBuildTargetsList) + add_dependencies(gather-build-stats ${projectBuildTargetsList}) + endif() + + endif() + +endfunction() + + +# Get a list all of the lib and exec build targets starting in a subdir and in +# below subdirs. +# +function(get_all_build_targets_including_in_subdirs srcdir targetsListVarOut) + + set(targetsList "") + + # Recurse into subdirectories. + get_property(dirs DIRECTORY ${srcdir} PROPERTY SUBDIRECTORIES) + foreach(d IN LISTS dirs) + get_all_build_targets_including_in_subdirs(${d} targetsSubdirList) + list(APPEND targetsList ${targetsSubdirList}) + endforeach() + + # Get the targets from this directory. + get_property(allTargetsThisDir DIRECTORY ${srcdir} PROPERTY BUILDSYSTEM_TARGETS) + filter_only_build_targets(allTargetsThisDir buildTargetsThisDir) + list(APPEND targetsList ${buildTargetsThisDir}) + + # Return + set(${targetsListVarOut} ${targetsList} PARENT_SCOPE) + +endfunction() + + +function(filter_only_build_targets targetListInVar targetListOutVar) + + #print_var(targetListInVar) + #print_var(${targetListInVar}) + + set(targetListOut "") + + foreach (target IN LISTS ${targetListInVar}) + #print_var(target) + get_property(targetType TARGET ${target} PROPERTY TYPE) + #print_var(targetType) + if ( + targetType STREQUAL "STATIC_LIBRARY" OR + targetType STREQUAL "SHARED_LIBRARY" OR + targetType STREQUAL "EXECUTABLE" + ) + #message("-- " "${target} is a regular build target!") + list(APPEND targetListOut ${target}) + else() + #message("-- " "${target} is **NOT** a regular build target!") + endif() + endforeach() + + set(${targetListOutVar} ${targetListOut} PARENT_SCOPE) + +endfunction() diff --git a/trilinos_source15/commonTools/build_stats/BuildStatsSharedVars.cmake b/trilinos_source15/commonTools/build_stats/BuildStatsSharedVars.cmake new file mode 100644 index 0000000000..fa89221ea2 --- /dev/null +++ b/trilinos_source15/commonTools/build_stats/BuildStatsSharedVars.cmake @@ -0,0 +1,2 @@ +set(BUILD_STATS_SRC_DIR "${CMAKE_CURRENT_LIST_DIR}") +set(BUILD_STATS_CSV_FILE "${${PROJECT_NAME}_BINARY_DIR}/build_stats.csv") diff --git a/trilinos_source15/commonTools/build_stats/BuildStatsWrappers.cmake b/trilinos_source15/commonTools/build_stats/BuildStatsWrappers.cmake new file mode 100644 index 0000000000..5dfcdf03b1 --- /dev/null +++ b/trilinos_source15/commonTools/build_stats/BuildStatsWrappers.cmake @@ -0,0 +1,348 @@ +################################################################################ +# +# Set up for build stats compiler wrappers and gathering up build stats +# +################################################################################ + + +include("${CMAKE_CURRENT_LIST_DIR}/BuildStatsSharedVars.cmake") + + +# Generate the build stats compiler wrappers if asked to do so. 
+#
+function(generate_build_stats_wrappers)
+
+  set_project_enable_build_stats_var()
+
+  if (${PROJECT_NAME}_ENABLE_BUILD_STATS)
+    build_stats_find_and_check_time()  # Sets cache var BUILD_STATS_TIME_CMD
+    if(NOT BUILD_STATS_TIME_CMD)
+      message("-- ${PROJECT_NAME}_ENABLE_BUILD_STATS=ON, but valid GNU Time was not found")
+      message("-- NOTE: Force setting ${PROJECT_NAME}_ENABLE_BUILD_STATS=OFF!")
+      set(${PROJECT_NAME}_ENABLE_BUILD_STATS OFF CACHE BOOL
+        "Forced to 'OFF' since valid 'time' command not found" FORCE)
+      return()
+    endif()
+
+    get_base_build_dir_for_python()
+
+    generate_build_stats_wrapper_for_op(C WRAP CMAKE_C_COMPILER)
+    generate_build_stats_wrapper_for_op(CXX WRAP CMAKE_CXX_COMPILER)
+    if (${PROJECT_NAME}_ENABLE_Fortran)
+      generate_build_stats_wrapper_for_op(Fortran WRAP CMAKE_Fortran_COMPILER)
+    endif()
+
+    generate_build_stats_wrapper_for_op(LD WRAP CMAKE_LD ALLOW_FIND)
+    generate_build_stats_wrapper_for_op(AR WRAP CMAKE_AR ALLOW_FIND)
+    generate_build_stats_wrapper_for_op(RANLIB WRAP CMAKE_RANLIB ALLOW_FIND)
+    # NOTE: LD, AR, and RANLIB can be used even in builds where
+    # BUILD_SHARED_LIBS=ON because individual add_library() commands can
+    # request static libraries be built.
+
+    set(BUILD_STATS_COMPLETED_FIRST_CONFIG TRUE CACHE INTERNAL "")
+  endif()
+
+endfunction()
+
+
+# Macro that sets the cache var ${PROJECT_NAME}_ENABLE_BUILD_STATS
+#
+macro(set_project_enable_build_stats_var)
+
+  if (NOT "$ENV{${PROJECT_NAME}_ENABLE_BUILD_STATS}" STREQUAL "")
+    # Use the default set in the env (overrides any local default set)
+    set(${PROJECT_NAME}_ENABLE_BUILD_STATS_DEFAULT
+      "$ENV{${PROJECT_NAME}_ENABLE_BUILD_STATS}")
+  elseif(NOT "${${PROJECT_NAME}_ENABLE_BUILD_STATS_DEFAULT}" STREQUAL "")
+    # ${PROJECT_NAME}_ENABLE_BUILD_STATS_DEFAULT was already set, so use it as
+    # the default.
+  else()
+    # No default was set, so make it OFF by default
+    set(${PROJECT_NAME}_ENABLE_BUILD_STATS_DEFAULT OFF)
+  endif()
+
+  advanced_set(${PROJECT_NAME}_ENABLE_BUILD_STATS
+    ${${PROJECT_NAME}_ENABLE_BUILD_STATS_DEFAULT} CACHE BOOL
+    "If set to 'ON', then compiler wrappers will be created and used to gather build stats."
+    )
+
+endmacro()
+
+
+# Find the GNU 'time' command that is used by magic_wrapper.py to extract the
+# info out of the command that it runs.
+#
+# If this finds the GNU 'time' command and it behaves correctly, then it sets
+# the cache var BUILD_STATS_TIME_CMD on output.  If BUILD_STATS_TIME_CMD is
+# already set by the user in the cache and it is found to not behave
+# correctly, then BUILD_STATS_TIME_CMD will be removed from the cache.
+#
+function(build_stats_find_and_check_time)
+
+  # let the user provide BUILD_STATS_TIME_CMD
+  if (BUILD_STATS_TIME_CMD)
+    message("-- BUILD_STATS_TIME_CMD=${BUILD_STATS_TIME_CMD}")
+    set(GNU_TIME_EXE "${BUILD_STATS_TIME_CMD}")
+  else()
+    find_program(GNU_TIME_EXE "time" HINTS "/usr/bin")
+    if(GNU_TIME_EXE)
+      message("-- Found time at ${GNU_TIME_EXE}")
+    else()
+      message("-- GNU time NOT found")
+      message("-- Install GNU time and/or set BUILD_STATS_TIME_CMD=/path/to/time")
+      return()
+    endif()
+  endif()
+
+  # This should ideally call the python script and request the fields to test,
+  # add 'badflag' or some other nonsense.
+  SET(GNU_TIME_POSSIBLE_FIELDS "e;M;K;D;X;F;R;W;w;c;S;U;P;I;O;r;s;k;x")
+  SET(GNU_TIME_SUPPORTED_FIELDS "")
+
+  # Should ideally ask for the dtypes or suitable regexes to vet them
+  foreach(flag ${GNU_TIME_POSSIBLE_FIELDS})
+    message(DEBUG "----------------------")
+    message(DEBUG "Time: Testing field ${flag}")
+    # The output from time goes to stderr, the program's output to stdout
+    execute_process(COMMAND "${GNU_TIME_EXE}"
+      "--format=%${flag}" "true"
+      # this is useless - we run a noop command
+      RESULT_VARIABLE GNU_TIME_RC
+      # capture stderr
+      ERROR_VARIABLE GNU_TIME_OUTPUT
+      )
+    # If this fails, then something is broken on the system.  The checks after
+    # will likely fail, because they expect a predefined format for stderr
+    # text.
+    if(NOT GNU_TIME_RC EQUAL 0)
+      message(DEBUG "Time invocation error returned `${GNU_TIME_RC}` but expected `0`")
+      message("-- GNU_TIME_EXE=${GNU_TIME_EXE} does not work")
+      message("-- Unset BUILD_STATS_TIME_CMD since '${GNU_TIME_EXE}' is invalid!")
+      unset(BUILD_STATS_TIME_CMD CACHE)
+      return()
+    endif()
+
+    # For now, just assert that all expected fields are supported (see
+    # discussion after function of other possible options).
+    if("${GNU_TIME_OUTPUT}" MATCHES "^\\?${flag}.*")
+      message("-- Time does not support Field: ${flag}")
+      message("-- GNU_TIME_EXE=${GNU_TIME_EXE} does not work")
+      message("-- Unset BUILD_STATS_TIME_CMD since '${GNU_TIME_EXE}' is invalid!")
+      unset(BUILD_STATS_TIME_CMD CACHE)
+      return()
+    else()
+      message(DEBUG "-- Time supports Field: ${flag}")
+      list(APPEND GNU_TIME_SUPPORTED_FIELDS "${flag}")
+    endif()
+  endforeach()
+
+  # If we get here, we should have a list of supported fields from TIME.
+  set(BUILD_STATS_TIME_CMD ${GNU_TIME_EXE}
+    CACHE FILEPATH "The GNU time binary required by build_stats" FORCE )
+endfunction()
+#
+# NOTE: Above, the GNU_TIME_SUPPORTED_FIELDS list var is currently not being
+# used for anything but in the future, it could be exported to the env as
+# TRILINOS_BUILD_STATS_OUTPUT_FIELDS for the magic_wrapper.py to use to pass in
+# to the 'time' command for fields that are known to be supported.  This would
+# override the default fields specified there.  Note that `time` will actually
+# silently accept bad fields, and give `?field` back.  If we were to set
+# TRILINOS_BUILD_STATS_OUTPUT_FIELDS to the GNU_TIME_SUPPORTED_FIELDS then bad
+# fields would simply not be written to a file.
+#
+# One unimplemented feature in the wrapper is
+# `TRILINOS_BUILD_STATS_PARSE_NM` which we could control if NM is used.  Like
+# 'time', we expect it to work and we could `find_program()` it as well.
+
+
+# Get the non-cache var BASE_BUILD_DIR_FOR_PYTHON
+#
+# This var gets picked up in the configure of build_stat_wrapper.sh.in.
+#
+macro(get_base_build_dir_for_python)
+  set(get_cwd_for_python ${BUILD_STATS_SRC_DIR}/get_cwd_for_python.py)
+  execute_process(
+    COMMAND ${PYTHON_EXECUTABLE} ${get_cwd_for_python}
+    WORKING_DIRECTORY ${PROJECT_BINARY_DIR}
+    OUTPUT_VARIABLE BASE_BUILD_DIR_FOR_PYTHON
+    OUTPUT_STRIP_TRAILING_WHITESPACE)
+endmacro()
+#
+# NOTE: We need this function to get the value of os.getcwd() from Python so
+# that it matches the value returned inside of magic_wrapper.py.  The issue is
+# that on some platforms, CMake determines a different absolute base build dir
+# for systems with mounted filesystems.  The only systems I know this happens
+# on are some systems at SNL with the mounted home directories.  By using the
+# same Python code, we ensure that we get the same base directory, which is
+# needed when computing relative paths.
+
+
+# Generate the build stats compiler wrapper for a given CMake variable.
+#
+# Usage:
+#
+#   generate_build_stats_wrapper_for_op(<op_name> WRAP <cmake_var> [ALLOW_FIND])
+#
+# The intent of this function is to pass in arbitrary CMake variables
+# that map to commands and generate suitable wrappers.
+#
+# <op_name> is the short name, like C, CXX, Fortran, LD, AR, RANLIB.
+#
+function(generate_build_stats_wrapper_for_op op_name)
+  cmake_parse_arguments(
+    PARSE_ARGV 1
+    BUILD_STATS   # prefix
+    "ALLOW_FIND"  # options
+    "WRAP"        # one_value_keywords
+    ""            # multi_value_keywords
+    )
+  set(variable_to_set "${BUILD_STATS_WRAP}")
+
+  string(TOLOWER "${op_name}" op_lc)
+  set(op_wrapper "${${PROJECT_NAME}_BINARY_DIR}/build_stat_${op_lc}_wrapper.sh")
+
+  generate_build_stats_wrapper_for_op_find_op_lc()  # Sets ${variable_to_set}
+
+  # Override the op with the wrapper but remember the original command
+  if (NOT BUILD_STATS_COMPLETED_FIRST_CONFIG)
+    if (${variable_to_set})  # True if was set on input or was found above
+      set(${variable_to_set}_ORIG ${${variable_to_set}}
+        CACHE FILEPATH "Original non-wrapped ${op_name}" FORCE )
+      set(${variable_to_set} "${op_wrapper}"
+        CACHE FILEPATH "Overwritten build stats ${op_name} wrapper" FORCE )
+
+      message("-- " "Generating build stats wrapper for ${op_name}")
+      set(BUILD_STATS_WRAPPER_INNER_OP "${${variable_to_set}_ORIG}")
+      configure_file("${BUILD_STATS_SRC_DIR}/build_stat_wrapper.sh.in"
+        "${op_wrapper}" @ONLY)
+
+      set(${variable_to_set}_OP_FOR_CONFIG_FILE_INSTALL_DIR
+        "${${variable_to_set}_ORIG}" CACHE INTERNAL "")
+    else()
+      message("-- Not wrapping ${op_name} because "
+        "${variable_to_set}=`${variable_to_set}` is not set."
+        " To enable statistics set ${variable_to_set}.")
+    endif()
+  endif()
+endfunction()
+#
+# NOTE: Above, if this is not the first configure (i.e.
+# BUILD_STATS_COMPLETED_FIRST_CONFIG is set) then we don't want to do
+# anything different with the build stats wrappers.  For example, we don't
+# want CMAKE_CXX_COMPILER to be empty on the first configure when this function
+# is called and have CMake find the C++ compiler later in the first
+# configure and then on the reconfigure have a build stats wrapper generated
+# for the C++ compiler.  If this happened, then the C++ code would build with
+# the raw C++ compiler after the first configure but after the second and
+# subsequent (re)configures would (re)build the code with the build-stats
+# wrapped compiler.  It seems like a bad idea to have the code build
+# differently on a reconfigure even if the user does not do anything other
+# than trigger a reconfigure (e.g. by touching a CMakeLists.txt file or adding
+# a new source file).
+
+
+# Helper macro to shorten the above function some
+#
+# Sets ${variable_to_set} if ${op_lc} is found.
+#
+macro(generate_build_stats_wrapper_for_op_find_op_lc)
+  # there's an issue here - if CMAKE_FOO is unset (whatever `variable_to_set` is)
+  # we need to know the command - but CMake hasn't chosen one yet...
+  if( BUILD_STATS_ALLOW_FIND
+      AND (
+        ("${${variable_to_set}}" STREQUAL "")
+        OR
+        (NOT ${variable_to_set})
+      )
+    )
+    message("-- " "${variable_to_set} is not set, but a wrapper has been requested.  Asking CMake to find ${op_lc}")
+    find_program(${variable_to_set} "${op_lc}")
+    print_var(${variable_to_set})
+  endif()
+endmacro()
+
+
+# Remove the build stats file on each configure if asked to do so.
+#
+function(remove_build_stats_file_on_configure)
+
+  advanced_set(${PROJECT_NAME}_REMOVE_BUILD_STATS_ON_CONFIGURE OFF
+    ${${PROJECT_NAME}_REMOVE_BUILD_STATS_ON_CONFIGURE_DEFAULT} CACHE BOOL
+    "If set to 'ON', then the build_stats.csv file will be removed on each configure." )
+
+  if (
+      (${PROJECT_NAME}_REMOVE_BUILD_STATS_ON_CONFIGURE)
+      AND
+      (EXISTS "${BUILD_STATS_CSV_FILE}")
+    )
+    MESSAGE("-- " "Removing existing file '${BUILD_STATS_CSV_FILE}'")
+    file(REMOVE "${BUILD_STATS_CSV_FILE}")
+  endif()
+
+endfunction()
+
+
+# Remove the .timing files on a fresh configure if asked to do so.
+#
+function(remove_build_stats_timing_files_on_fresh_configure)
+
+  advanced_set(${PROJECT_NAME}_REMOVE_BUILD_STATS_TIMING_FILES_ON_FRESH_CONFIGURE OFF
+    CACHE BOOL
+    "If set to 'ON', then all .timing files will be removed on a fresh configure."
+    )
+
+  if (
+      ${PROJECT_NAME}_REMOVE_BUILD_STATS_TIMING_FILES_ON_FRESH_CONFIGURE
+      AND
+      (NOT "${${PROJECT_NAME}_BUILD_STATS_INIT_CONFIG_WAS_DONE}")
+    )
+
+    message("-- " "Removing all .timing files on fresh configure")
+
+    execute_process(
+      COMMAND "${BUILD_STATS_SRC_DIR}/remove_all_target_timing_files.sh"
+        ${PROJECT_BINARY_DIR} )
+
+    set(${PROJECT_NAME}_BUILD_STATS_INIT_CONFIG_WAS_DONE ON CACHE INTERNAL "")
+
+  endif()
+
+endfunction()
+
+
+# Set up install targets for the build stats scripts
+#
+# This can only be called after these install dirs have been set!
+#
+function(install_build_stats_scripts)
+
+  # disable this for now...
+  return()
+
+  if (${PROJECT_NAME}_ENABLE_BUILD_STATS)
+    install_build_stats_wrapper_for_lang(C)
+    install_build_stats_wrapper_for_lang(CXX)
+    if (${PROJECT_NAME}_ENABLE_Fortran)
+      install_build_stats_wrapper_for_lang(Fortran)
+    endif()
+
+    install_build_stats_wrapper_for_lang(AR)
+    install_build_stats_wrapper_for_lang(LD)
+    install_build_stats_wrapper_for_lang(RANLIB)
+  endif()
+
+endfunction()
+
+
+# Install the build stats compiler wrapper for a single language.
+#
+function(install_build_stats_wrapper_for_lang lang)
+  string(TOLOWER "${lang}" op_lc)
+  set(op_wrapper
+    "${${PROJECT_NAME}_BINARY_DIR}/build_stat_${op_lc}_wrapper.sh")
+
+  install(PROGRAMS "${op_wrapper}"
+    DESTINATION "${${PROJECT_NAME}_INSTALL_RUNTIME_DIR}")
+endfunction()
diff --git a/trilinos_source15/commonTools/build_stats/CMakeLists.txt b/trilinos_source15/commonTools/build_stats/CMakeLists.txt
new file mode 100644
index 0000000000..158048338f
--- /dev/null
+++ b/trilinos_source15/commonTools/build_stats/CMakeLists.txt
@@ -0,0 +1,57 @@
+tribits_package(TrilinosBuildStats)
+
+include("${CMAKE_CURRENT_LIST_DIR}/BuildStatsGatherTarget.cmake")
+add_target_gather_build_stats()
+# NOTE: We define this build target here after all of the other packages are
+# done getting defined so that it will have a dependency on all defined
+# library and executable targets but before the end of the tribits_project()
+# command so that the TriBITS Ninja Makefile generator will pick this up in
+# the documentation!
+
+#
+# Create summary report of build stats if build stats are enabled
+#
+
+tribits_add_advanced_test( Results
+  EXCLUDE_IF_NOT_TRUE ${PROJECT_NAME}_ENABLE_BUILD_STATS
+  OVERALL_WORKING_DIRECTORY TEST_NAME
+  OVERALL_NUM_MPI_PROCS 1
+  TEST_0
+    MESSAGE "Gather up the build stats in case the build failed"
+    CMND "${CMAKE_CURRENT_LIST_DIR}/gather_build_stats.py"
+    ARGS -v
+    WORKING_DIRECTORY "${${PROJECT_NAME}_BINARY_DIR}"
+    SKIP_CLEAN_WORKING_DIRECTORY  # Critical or you delete the entire working dir!
+    ALWAYS_FAIL_ON_NONZERO_RETURN
+  TEST_1
+    MESSAGE "Summarize the build stats from the already created build_stats.csv file (CTEST_FULL_OUTPUT)"
+    CMND "${${PROJECT_NAME}_SOURCE_DIR}/commonTools/build_stats/summarize_build_stats.py"
+    ARGS --bin-by-subdirs-under-dirs=commonTools,packages
+      "${${PROJECT_NAME}_BINARY_DIR}/build_stats.csv"
+    ALWAYS_FAIL_ON_NONZERO_RETURN
+  ADDED_TEST_NAME_OUT Results_TEST_NAME
+  )
+  # Note, above CTEST_FULL_OUTPUT is in the MESSAGE to get ctest to keep the
+  # full test output and send and display it on CDash.
+
+if (Results_TEST_NAME)
+  set_tests_properties( ${Results_TEST_NAME} PROPERTIES
+    ATTACHED_FILES "${${PROJECT_NAME}_BINARY_DIR}/build_stats.csv")
+endif()
+
+# NOTE: Above, it is harmless to gather up the build_stats.csv file again in
+# this test if the build passed and it has already been gathered up.  But if
+# the build failed, then the file build_stats.csv never got updated, so it is
+# critical to update it in this test.  Otherwise, you will be displaying an
+# old build_stats.csv file from a previous build, which is not good.
+
+
+#
+# Add unit tests for build_stats related support code (even if build stats is
+# not enabled for this build).
+#
+
+tribits_add_test_directories(unit_tests)
+
+
+tribits_package_postprocess()
diff --git a/trilinos_source15/commonTools/build_stats/FindTribitsCiSupportDir.py b/trilinos_source15/commonTools/build_stats/FindTribitsCiSupportDir.py
new file mode 100644
index 0000000000..86e1a8ae18
--- /dev/null
+++ b/trilinos_source15/commonTools/build_stats/FindTribitsCiSupportDir.py
@@ -0,0 +1,54 @@
+# @HEADER
+# ************************************************************************
+#
+# TriBITS: Tribal Build, Integrate, and Test System
+# Copyright 2013 Sandia Corporation
+#
+# Under the terms of Contract DE-AC04-94AL85000 with Sandia Corporation,
+# the U.S. Government retains certain rights in this software.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# 1. Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright
+# notice, this list of conditions and the following disclaimer in the
+# documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the Corporation nor the names of the
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY SANDIA CORPORATION "AS IS" AND ANY
+# EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL SANDIA CORPORATION OR THE
+# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+# +# ************************************************************************ +# @HEADER + +import os +import sys + +thisScriptsDir = os.path.dirname(os.path.abspath(__file__)) +trilinosTriBITSDirEnv = os.environ.get('Trilinos_TRIBITS_DIR', None) +if trilinosTriBITSDirEnv: + tribitsDir = trilinosTriBITSDirEnv +else: + tribitsDir = os.path.abspath( + os.path.join(thisScriptsDir, "../../cmake/tribits") ) +ciSupportDir = os.path.join(tribitsDir, "ci_support") +pythonUtilsDir = os.path.join(tribitsDir, "python_utils") +#print "ciSupportDir =", ciSupportDir + +sys.path = [ciSupportDir, pythonUtilsDir] + sys.path diff --git a/trilinos_source15/commonTools/build_stats/build_stat_wrapper.sh.in b/trilinos_source15/commonTools/build_stats/build_stat_wrapper.sh.in new file mode 100755 index 0000000000..28d18f611c --- /dev/null +++ b/trilinos_source15/commonTools/build_stats/build_stat_wrapper.sh.in @@ -0,0 +1,18 @@ +#!/bin/bash +# Consumed by the magic_wrapper + +export TRILINOS_BUILD_STATS_PARSE_NM="false" +# BUILD_STATS_INNER_OP is the command we are wrapping +export TRILINOS_BUILD_STATS_INNER_OP="@BUILD_STATS_WRAPPER_INNER_OP@" +# BUILD_STATS_TIME_CMD points to a valid GNU Time executable +export TRILINOS_BUILD_STATS_TIME_CMD="@BUILD_STATS_TIME_CMD@" +# We need to know the `root` of the build tree so we annotate +# paths correctly (see github PR 8638 for issue with Makefile builds) +export TRILINOS_BUILD_STATS_BASE_DIR="@BASE_BUILD_DIR_FOR_PYTHON@" + +if [ "${CMAKE_IS_IN_CONFIGURE_MODE}" == "1" ]; then + ${TRILINOS_BUILD_STATS_INNER_OP} "$@" +else + "@PYTHON_EXECUTABLE@" \ + "@BUILD_STATS_SRC_DIR@/wrapper/magic_wrapper.py" "$@" +fi diff --git a/trilinos_source15/commonTools/build_stats/cmake/Dependencies.cmake b/trilinos_source15/commonTools/build_stats/cmake/Dependencies.cmake new file mode 100644 index 0000000000..a0867faead --- /dev/null +++ b/trilinos_source15/commonTools/build_stats/cmake/Dependencies.cmake @@ -0,0 +1 @@ +TRIBITS_PACKAGE_DEFINE_DEPENDENCIES() diff --git a/trilinos_source15/commonTools/build_stats/gather_build_stats.py b/trilinos_source15/commonTools/build_stats/gather_build_stats.py new file mode 100755 index 0000000000..0cda143c9d --- /dev/null +++ b/trilinos_source15/commonTools/build_stats/gather_build_stats.py @@ -0,0 +1,316 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +import csv + +from FindTribitsCiSupportDir import * +import GeneralScriptSupport as GSS +import CDashQueryAnalyzeReport as CDQAR + +from BuildStatsData import * + + +# +# Helper functions +# + + +# Robustly read all CSV build stats *.timing files under a base dir and return +# as a list of dicts (LOD). +# +# This robustly deals with *.timing files and discards any *.timing files that +# have any problems. 
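+#
+# As a rough usage sketch (the base directory and output file names here are
+# hypothetical), the helpers in this module compose as:
+#
+#   allValidTimingFilesLOD = readAllValidTimingFiles("some_base_build_dir")
+#   dictOfLists = getDictOfListsFromListOfDicts(allValidTimingFilesLOD)
+#   writeDictOfListsToCsvFile(dictOfLists, "build_stats.csv")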
+#
+def readAllValidTimingFiles(baseDir, printErrMsg=True, printStats=False):
+  listOfAllTimingFiles = getListOfAllTimingFiles(baseDir)
+  if printStats:
+    print("Number of *.timing files found = "+str(len(listOfAllTimingFiles)))
+  allValidTimingFilesLOD = []
+  for timingFile in listOfAllTimingFiles:
+    timingFileFullPath = baseDir+"/"+timingFile
+    (buildStatsTimingDict, errMsg) = \
+      readBuildStatsTimingFileIntoDict(timingFileFullPath)
+    if errMsg != "" and printErrMsg:
+      print(errMsg)
+    if not buildStatsTimingDict == None:
+      allValidTimingFilesLOD.append(buildStatsTimingDict)
+  if printStats:
+    print("Number of valid *.timing files found = "+str(len(allValidTimingFilesLOD)))
+  return allValidTimingFilesLOD
+
+
+# Robustly read a CSV build stats timing file created by magic_wrapper.py and
+# return as dict.
+#
+# Returns tuple:
+#
+#   (timingBuildStatsDict, errorMsg)
+#
+# If the timing build stats file 'buildStatsTimingFile' exists and has valid
+# data, then 'timingBuildStatsDict' will be a simple dict with the contents of
+# the CSV file.  Otherwise, 'timingBuildStatsDict' will be 'None' and 'errMsg'
+# will contain the error message.
+#
+# This provides for a very robust reading of these timing build stats files in
+# case there are problems with the running of the magic_wrapper.py tool.
+#
+def readBuildStatsTimingFileIntoDict(buildStatsTimingFile):
+
+  # Output data initialization
+  buildStatsTimingDict = None
+  errMsg = ""
+
+  (listOfDicts, errMsg) = robustReadCsvFileIntoListOfDicts(buildStatsTimingFile)
+
+  if errMsg == "" and not len(listOfDicts) == 1:
+    errMsg = buildStatsTimingFile+": ERROR: Contains "+\
+      str(len(listOfDicts))+" != 1 data rows!"
+
+  if listOfDicts != None and errMsg == "":
+    # No errors found to this point, so grab the first row as the build stats dict
+    buildStatsTimingDict = listOfDicts[0]
+
+  if buildStatsTimingDict != None and errMsg == "":
+    errMsgBody = checkBuildStatsTimingDictHasError(buildStatsTimingDict)
+    if errMsgBody != "":
+      errMsg = buildStatsTimingFile+": "+errMsgBody
+
+  if buildStatsTimingDict != None and errMsg == "":
+    normalizeFileNameFieldInDict(buildStatsTimingDict)
+
+  if errMsg != "":
+    buildStatsTimingDict = None
+
+  return (buildStatsTimingDict, errMsg)
+
+
+# Call readCsvFileIntoListOfDicts() but make robust to basic read errors.
+#
+# Returns:
+#
+#   (listOfDicts, errMsg)
+#
+# Returns a valid list of dicts listOfDicts!=None unless some error occurs.
+# If some error occurred, then errMsg will be set to a string describing what
+# the problem was.
+#
+# No exception should ever be thrown from this function.  This is useful to
+# use in cases where the existence or basic structure of a CSV file may be
+# broken and we want to ignore or gracefully deal with invalid files.
+#
+def robustReadCsvFileIntoListOfDicts(csvFile):
+  listOfDicts = None
+  errMsg = ""
+  if os.path.exists(csvFile):
+    try:
+      listOfDicts = CDQAR.readCsvFileIntoListOfDicts(csvFile)
+    except Exception as exceptObj:
+      if str(exceptObj).find("is empty which is not allowed") != -1:
+        errMsg = csvFile+": ERROR: File is empty!"
+      else:
+        errMsg = csvFile+": ERROR: "+str(exceptObj)
+      # NOTE: The above check is tied pretty tightly to the implementation of
+      # readCsvFileIntoListOfDicts() in looking for a specific substring in
+      # the error message but it will still capture any other error as well
+      # and report it through errMsg.
+  else:
+    errMsg = csvFile+": ERROR: File does not exist!"
+  return (listOfDicts, errMsg)
+# ToDo: Move the above function to CsvFileUtils.py!
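+#
+# A minimal sketch of the (value, errMsg) contract above (the file name is
+# hypothetical):
+#
+#   (listOfDicts, errMsg) = robustReadCsvFileIntoListOfDicts("t1.timing")
+#   if listOfDicts is None:
+#     print(errMsg)  # e.g. "t1.timing: ERROR: File does not exist!"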
+
+
+# Assert that a build stats timing dict contains the required fields and has
+# valid data in those required fields.
+#
+# Returns:
+#
+#   errMsg
+#
+# Returns errMsg=="" if there is no error. Otherwise, errMsg describes the
+# nature of the error.
+#
+def checkBuildStatsTimingDictHasError(buildStatsTimingDict):
+  errMsg = ""
+  for stdBuildStatColAndType in getStdBuildStatsColsAndTypesList():
+    requiredFieldName = stdBuildStatColAndType.colName()
+    requiredFieldType = stdBuildStatColAndType.colType()
+    strVal = buildStatsTimingDict.get(requiredFieldName, None)
+    if strVal is None:
+      errMsg = "ERROR: The required field '"+requiredFieldName+"' is missing!"
+      break
+    try:
+      convertedVal = stdBuildStatColAndType.convertFromStr(strVal)
+    except Exception as exceptObj:
+      errMsg = "ERROR: For field '"+requiredFieldName+"' the string value '"+strVal+"'"+\
+        " could not be converted to the expected type '"+requiredFieldType+"'!"
+  return errMsg
+
+
+# Normalize the 'FileName' field value
+#
+def normalizeFileNameFieldInDict(aDict):
+  fileName = aDict.get('FileName')
+  if fileName.startswith("./"):
+    aDict.update({'FileName':fileName[2:]})
+
+
+# Get list of all *.timing files under baseDir and return paths relative to
+# baseDir.
+#
+# This does not read the contents of any of the timing files, it just returns
+# a list of all of them.
+#
+def getListOfAllTimingFiles(baseDir):
+  listOfAllTimingFiles = []
+  for root, subdirs, files in os.walk(baseDir):
+    if root == baseDir: relRoot = ""
+    else: relRoot = root.replace(baseDir+"/","")
+    for aFile in files:
+      if aFile.endswith(".timing"):
+        aFileRelPath = os.path.join(relRoot, aFile)
+        listOfAllTimingFiles.append(aFileRelPath)
+  return listOfAllTimingFiles
+
+
+# Fill in a dict of lists for the combined info from a list of dicts
+#
+# The output dict of lists will have the superset of keys from all of the
+# input dicts in the listOfDicts and any non-existent values will be given the
+# empty string "" instead of `None`.
+#
+def getDictOfListsFromListOfDicts(listOfDicts, printStats=False):
+  numTotalRows = len(listOfDicts)
+  supersetOfFieldNamesList = getSupersetOfFieldNamesList(listOfDicts)
+  if printStats:
+    print(
+      "Combined build-stats keys sorted:\n"+\
+      "  "+str(supersetOfFieldNamesList) )
+  dictOfLists = {}
+  # Create dict of lists with all empty values
+  for keyName in supersetOfFieldNamesList:
+    dictOfLists.update( { keyName : [""] * numTotalRows } )
+  # Fill in the values from the dicts in the list
+  for i in range(numTotalRows):
+    aDict = listOfDicts[i]
+    for key, value in aDict.items():
+      dictOfLists.get(key)[i] = value
+  # Return the completed data-structure
+  return dictOfLists
+
+
+# Get the superset of all of the field names for a list of dicts
+#
+def getSupersetOfFieldNamesList(listOfDicts):
+  supersetOfFieldNames = set()
+  for aDict in listOfDicts:
+    supersetOfFieldNames = supersetOfFieldNames.union(aDict.keys())
+  return sorted(list(supersetOfFieldNames))
+
+
+# Write a dict of lists to a CSV file
+#
+# Note, this writes the column names (keys) in sorted order.
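+#
+# For example (mirroring the unit test for this function), writing
+#
+#   { 'field1':['11','21'], 'field2':['12','22'], 'field3':['','23'] }
+#
+# produces the CSV lines:
+#
+#   field1,field2,field3
+#   11,12,
+#   21,22,23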
+# +def writeDictOfListsToCsvFile(dictOfLists, csvFile): + keysList = sorted(dictOfLists.keys()) + if len(keysList) > 0: + numTotalRows = len(dictOfLists.get(keysList[0])) # All lists are same length + else: + numTotalRows = 0 + numTotalKeys = len(keysList) + with open(csvFile, "w") as csvFileHandle: + csvWriter = csv.writer(csvFileHandle, delimiter=",", lineterminator="\n") + csvWriter.writerow(keysList) + for i in range(numTotalRows): + rowList = [] + for aKey in keysList: + rowList.append(dictOfLists.get(aKey)[i]) + csvWriter.writerow(rowList) +# ToDo: Move the above function to CsvFileUtils.py! + + +# +# Helper functions for main() +# + + +# +# Help message +# + + +def getRequiredFieldsAndTypesDocStr(): + docStr = "" + for stdBuildStatColAndType in getStdBuildStatsColsAndTypesList(): + requiredFieldName = stdBuildStatColAndType.colName() + requiredFieldType = stdBuildStatColAndType.colType() + docStr += " "+requiredFieldName+" : "+requiredFieldType+"\n" + return docStr + + +usageHelp = r""" + +Gather up build stats from *.timing CSV files under the given base directory +created by the magic_wrapper.py tool as a byproduct of building the various +targets in a project. + +This will discard the data from any *.timing file that does not have valid +values for the required minimum column headers/fields with types: + +"""+getRequiredFieldsAndTypesDocStr()+r""" + +or if any other problems are found with a *.timing file. + +The column headers in all of the *.timing files are combined into one superset +in the generated 'buildStatsCsvFile' file and the columns are listed in sorted +order. (The values for any fields missing in a *.timing file are given the null +string ''.) +""" + + +def injectCmndLineOptionsInParser(clp): + + clp.add_argument( + "--base-dir", "-d", dest="baseDir", default="", + help="Base directory for project build dir containing the *.timing files."+\ + " [default is current working directory]" ) + + clp.add_argument( + "--verbose", "-v", dest="verbose", action="store_true", default=False, + help="Provide verbose output." 
    )
+
+  clp.add_argument("buildStatsCsvFile", nargs='?', default="build_stats.csv",
+    help="The build stats CSV file to be created on output."+\
+      " [default is 'build_stats.csv' in current working directory]" )
+
+
+def getCmndLineOptions():
+  from argparse import ArgumentParser, RawDescriptionHelpFormatter
+  clp = ArgumentParser(description=usageHelp,
+    formatter_class=RawDescriptionHelpFormatter)
+  injectCmndLineOptionsInParser(clp)
+  options = clp.parse_args(sys.argv[1:])
+  if options.baseDir == "":
+    options.baseDir = os.getcwd()
+  elif not os.path.exists(options.baseDir):
+    print("Error, the base dir '"+options.baseDir+"' does not exist!")
+    sys.exit(1)
+  return options
+
+
+#
+# Main()
+#
+
+if __name__ == '__main__':
+  inOptions = getCmndLineOptions()
+  if inOptions.verbose:
+    print("Reading all *.timing files from under '"+inOptions.baseDir+"' ...")
+  allValidTimingFilesListOfDicts = readAllValidTimingFiles(inOptions.baseDir,
+    printStats=inOptions.verbose)
+  allValidTimingFilesDictOfLists = \
+    getDictOfListsFromListOfDicts(allValidTimingFilesListOfDicts,
+      printStats=inOptions.verbose)
+  writeDictOfListsToCsvFile(allValidTimingFilesDictOfLists,
+    inOptions.buildStatsCsvFile)
+  if inOptions.verbose:
+    print("Wrote file '"+inOptions.buildStatsCsvFile+"'")
diff --git a/trilinos_source15/commonTools/build_stats/get_cwd_for_python.py b/trilinos_source15/commonTools/build_stats/get_cwd_for_python.py
new file mode 100644
index 0000000000..95f793fdbc
--- /dev/null
+++ b/trilinos_source15/commonTools/build_stats/get_cwd_for_python.py
@@ -0,0 +1,2 @@
+import os
+print(os.getcwd())
diff --git a/trilinos_source15/commonTools/build_stats/remove_all_target_timing_files.sh b/trilinos_source15/commonTools/build_stats/remove_all_target_timing_files.sh
new file mode 100755
index 0000000000..4bcd1b82bf
--- /dev/null
+++ b/trilinos_source15/commonTools/build_stats/remove_all_target_timing_files.sh
@@ -0,0 +1,10 @@
+#!/bin/bash -e
+
+base_dir=$1 ; shift
+if [[ "${base_dir}" == "" ]] ; then
+  echo "Error, must provide base dir as first argument!"
+  exit 1
+fi
+
+find ${base_dir} -name "*.timing" -exec rm {} \;
+#find ${base_dir} -name "*.timing" -exec ls -l {} \;
diff --git a/trilinos_source15/commonTools/build_stats/summarize_build_stats.py b/trilinos_source15/commonTools/build_stats/summarize_build_stats.py
new file mode 100755
index 0000000000..3bdef3cf27
--- /dev/null
+++ b/trilinos_source15/commonTools/build_stats/summarize_build_stats.py
@@ -0,0 +1,365 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+import csv
+from decimal import Decimal
+
+from FindTribitsCiSupportDir import *
+import GeneralScriptSupport as GSS
+import CDashQueryAnalyzeReport as CDQAR
+
+from BuildStatsData import *
+
+
+#
+# Helper functions
+#
+
+
+# Round a float to n decimal places
+#
+def roundNum(numIn, numDecPlaces):
+  return float(round(Decimal(numIn), numDecPlaces))
+
+
+# Read a CSV file of build stats into a dict of lists for just the fields we
+# want.
+#
+# Returns a dict of lists where each key is the column/field name and the
+# value is an array of data for that field.
+#
+def readBuildStatsCsvFileIntoDictOfLists(buildStatusCsvFileName,
+    computeStdScaledFields=True, normalizeFileName=True,
+  ):
+  buildStatsDOL = readCsvFileIntoDictOfLists(buildStatusCsvFileName,
+    getStdBuildStatsColsAndTypesList() )
+  if not buildStatsDOL:
+    return {}
+  if computeStdScaledFields:
+    addStdScaledBuildStatsFields(buildStatsDOL)
+  if normalizeFileName:
+    normalizeFileNameField(buildStatsDOL)
+  return buildStatsDOL
+
+
+# Read in a CSV file as a dict of lists.
+#
+def readCsvFileIntoDictOfLists(csvFileName, colNameAndTypeList):
+  dictOfLists = {}
+  with open(csvFileName, 'r') as csvFile:
+    csvReader = csv.reader(csvFile)
+    # Get the list of col headers and the index to the col headers we want
+    columnHeadersList = \
+      CDQAR.getColumnHeadersFromCsvFileReader(csvFileName, csvReader)
+    if len(columnHeadersList) == 0:
+      # File is empty so just return an empty dictOfLists!
+      return dictOfLists
+    colNameTypeIdxList = \
+      getColNameTypeIdxListGivenColNameAndTypeList(csvFileName, columnHeadersList,
+        colNameAndTypeList)
+    # Initialize empty lists for each column to hold the data
+    for colNameTypeIdx in colNameTypeIdxList:
+      dictOfLists.update( { colNameTypeIdx.colName() : [] } )
+    # Fill the columns of data
+    dataRowIdx = 0
+    for lineList in csvReader:
+      if not lineList: continue  # Ignore blank lines
+      CDQAR.stripWhiltespaceFromStrList(lineList)
+      assertNumExpectedCsvFileLineEntries(csvFileName, columnHeadersList,
+        dataRowIdx, lineList)
+      # Read the row entries
+      for colNameTypeIdx in colNameTypeIdxList:
+        dictOfLists[colNameTypeIdx.colName()].append(
+          colNameTypeIdx.convertFromStr(lineList[colNameTypeIdx.getIdx()]) )
+      # Update for next row
+      dataRowIdx += 1
+  # Return completed dict of lists
+  return dictOfLists
+# ToDo: Move above function into CsvFileUtils.py
+
+
+def assertNumExpectedCsvFileLineEntries(csvFileName, columnHeadersList,
+    dataRowIdx, csvLineList,
+  ):
+  if len(columnHeadersList) != len(csvLineList):
+    raise Exception(
+      "Error, the CSV file '"+csvFileName+"' has "+str(len(columnHeadersList))+\
+      " column headers but data row "+str(dataRowIdx)+" only has "+\
+      str(len(csvLineList))+" entries!" )
+
+
+# Add standard scaled fields to a read-in build stats dict of lists
+#
+def addStdScaledBuildStatsFields(buildStatsDOL):
+  addNewFieldByScalingExistingField(buildStatsDOL, 'max_resident_size_Kb',
+    1.0/1024, 2, 'max_resident_size_mb')
+  addNewFieldByScalingExistingField(buildStatsDOL, 'FileSize',
+    1.0/(1024*1024), 2, 'file_size_mb')
+
+
+# Scale an existing field to create a new field
+#
+def addNewFieldByScalingExistingField(dictOfLists, existingFieldName,
+    scaleFactor, decimalPlaces, newFieldName,
+  ):
+  existingFieldDataList = dictOfLists[existingFieldName]
+  newFieldDataList = []
+  for entry in existingFieldDataList:
+    newEntry = roundNum(scaleFactor*entry, decimalPlaces)
+    newFieldDataList.append(newEntry)
+  dictOfLists.update( {newFieldName : newFieldDataList} )
+
+
+# Normalize the FileName field to remove a beginning './'
+#
+def normalizeFileNameField(dictOfLists):
+  fileNameList = dictOfLists.get('FileName')
+  i = 0
+  while i < len(fileNameList):
+    fileName = fileNameList[i]
+    if (len(fileName) > 1) and (fileName[0:2] == './'):
+      fileNameList[i] = fileName[2:]
+    i += 1
+
+
+# Bin the build stats dict of lists by subdirs under a given list of dirs for
+# the 'FileName' field.
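+#
+# For example (mirroring the unit-test data for this function), binning a DOL
+# whose 'FileName' column is
+#
+#   [ "basedir/pkg0/some_file0", "basedir/pkg1/some_file1",
+#     "basedir/pkg0/some_file2" ]
+#
+# under the dirs list [ "basedir" ] produces bins keyed 'pkg0' and 'pkg1',
+# where each bin is itself a dict of lists holding just the rows whose
+# 'FileName' falls under that subdir.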
+#
+def binBuildStatsDictOfListsBySubdirUnderDirs(
+    buildStatsDOL,
+    binBySubdirsUnderDirsList,
+  ):
+  binnedBuildStatsDOL_dict = {}
+  numTotalRows = len(buildStatsDOL.get('FileName'))
+  row_idx = 0
+  while row_idx < numTotalRows:
+    fileName = buildStatsDOL.get('FileName')[row_idx]
+    for baseDir in binBySubdirsUnderDirsList:
+      if fileName.startswith(baseDir+"/"):
+        subdirUnderBaseDir = getSubdirUnderBaseDir(baseDir, fileName)
+        fileNameBuildStatsDOL = \
+          binnedBuildStatsDOL_dict.setdefault(subdirUnderBaseDir, {})
+        addRowFromDictOfListsToDictOfLists(buildStatsDOL, row_idx,
+          fileNameBuildStatsDOL)
+    row_idx += 1
+  #
+  return BuildStatsBinnedBySubdirs(buildStatsDOL, binnedBuildStatsDOL_dict)
+
+
+class BuildStatsBinnedBySubdirs(object):
+  def __init__(self, fullBuildStatsDOL, binnedBuildStatsDOL_dict):
+    self.fullBuildStatsDOL = fullBuildStatsDOL
+    self.binnedBuildStatsDOL_dict = binnedBuildStatsDOL_dict
+
+
+def getSubdirUnderBaseDir(baseDir, fileName):
+  lenBaseDir = len(baseDir)
+  positionOfDirCharAfterSubDir = fileName.find("/", lenBaseDir+2)
+  subdirUnderBaseDir = fileName[lenBaseDir+1 : positionOfDirCharAfterSubDir]
+  return subdirUnderBaseDir
+
+
+def addRowFromDictOfListsToDictOfLists(inputDOL, row_idx, inoutDOL):
+  keysList = inputDOL.keys()
+  for key in keysList:
+    keyValList = inoutDOL.setdefault(key, [])
+    keyValList.append(inputDOL.get(key)[row_idx])
+
+
+# Compute summary info about a single build stat from a dict of lists of build
+# stats
+#
+def computeBuildStatusSummaryForOneField(buildStatsDOL, fieldName, decimalPlaces):
+  buildStatList = buildStatsDOL[fieldName]
+  fileNameList = buildStatsDOL['FileName']
+  # Set easy fields
+  buildStatSummary = BuildStatSummary(fieldName)
+  buildStatSummary.numValues = len(buildStatList)
+  buildStatSummary.sumValue = roundNum(sum(buildStatList), decimalPlaces)
+  # Compute max and the corresponding filename
+  maxValue = 0
+  maxFileName = ""
+  for i in range(buildStatSummary.numValues):
+    buildStat = buildStatList[i]
+    fileName = fileNameList[i]
+    if buildStat > maxValue:
+      maxValue = buildStat
+      maxFileName = fileName
+  buildStatSummary.maxValue = maxValue
+  buildStatSummary.maxFileName = maxFileName
+  # Return
+  return buildStatSummary
+
+
+class BuildStatSummary(object):
+  def __init__(self, fieldName):
+    self.fieldName = fieldName
+    self.numValues = None
+    self.sumValue = None
+    self.maxValue = None
+    self.maxFileName = None
+  def __str__(self):
+    return "{"+\
+      "fieldName="+self.fieldName+", " + \
+      "numValues="+str(self.numValues)+", " + \
+      "sumValue="+str(self.sumValue)+", " + \
+      "maxValue="+str(self.maxValue)+", " + \
+      "maxFileName="+self.maxFileName + \
+      "}"
+
+
+# Compute and return a list of standard build stats summaries from a dict of
+# lists of build stats.
+#
+def computeStdBuildStatsSummariesSingleDOL(buildStatsDOL):
+  buildStatsSummariesList = []
+  buildStatsSummariesList.append(
+    computeBuildStatusSummaryForOneField(buildStatsDOL, 'max_resident_size_mb', 2) )
+  buildStatsSummariesList.append(
+    computeBuildStatusSummaryForOneField(buildStatsDOL, 'elapsed_real_time_sec', 2) )
+  buildStatsSummariesList.append(
+    computeBuildStatusSummaryForOneField(buildStatsDOL, 'file_size_mb', 2) )
+  return buildStatsSummariesList
+
+
+# Compute and return the lists of standard build stats summaries for the full
+# project as well as those binned by subdirs.
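+#
+# For example (illustrative only): for bins { 'pkg0':..., 'pkg1':... }, the
+# returned object carries 'fullBuildStatsSummariesList' with one
+# BuildStatSummary per standard field (max_resident_size_mb,
+# elapsed_real_time_sec, file_size_mb), plus
+# 'binnedBuildStatsSummariesList_dict' with the same three summaries computed
+# per subdir key.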
+#
+def computeStdBuildStatsSummaries(buildStatsBinnedBySubdirs):
+  fullBuildStatsSummariesList = \
+    computeStdBuildStatsSummariesSingleDOL(buildStatsBinnedBySubdirs.fullBuildStatsDOL)
+  binnedBuildStatsSummariesList_dict = {}
+  for subdir in buildStatsBinnedBySubdirs.binnedBuildStatsDOL_dict.keys():
+    binnedBuildStatsSummariesList_dict.update(
+      {
+        subdir :
+          computeStdBuildStatsSummariesSingleDOL(
+            buildStatsBinnedBySubdirs.binnedBuildStatsDOL_dict.get(subdir) )
+      }
+      )
+  return BuildStatsSummariesBinnedBySubdirs(fullBuildStatsSummariesList,
+    binnedBuildStatsSummariesList_dict)
+
+
+class BuildStatsSummariesBinnedBySubdirs(object):
+  def __init__(self, fullBuildStatsSummariesList, binnedBuildStatsSummariesList_dict):
+    self.fullBuildStatsSummariesList = fullBuildStatsSummariesList
+    self.binnedBuildStatsSummariesList_dict = binnedBuildStatsSummariesList_dict
+
+
+# Create an ASCII text report block for a list of build stats summaries for a
+# single list of stats.
+#
+def createAsciiReportOfBuildStatsSummariesSingleSet(buildStatsSummariesList,
+    buildStatsSetName,
+  ):
+  asciiReportStr = ""
+  for buildStatsSummary in buildStatsSummariesList:
+    asciiReportStr += createAsciiReportOfOneBuildStatsSummary(buildStatsSummary,
+      buildStatsSetName)
+  return asciiReportStr
+
+
+def createAsciiReportOfOneBuildStatsSummary(buildStatsSummary, buildStatsSetName):
+  # Shorter names for below
+  bss = buildStatsSummary
+  bssn = buildStatsSetName
+  # Create and return the report str
+  asciiReportStr = \
+    bssn+": sum("+bss.fieldName+") = "+str(bss.sumValue)+\
+    " ("+str(bss.numValues)+" entries)\n"+\
+    bssn+": max("+bss.fieldName+") = "+str(bss.maxValue)+" ("+bss.maxFileName+")\n"
+  return asciiReportStr
+
+
+# Create an ASCII text report for the full-project build stats summaries
+# followed by one report block for each subdir bin.
+#
+def createAsciiReportOfBuildStatsSummaries(buildStatsSummariesBinnedBySubdirs):
+  asciiReportStr = ""
+  asciiReportStr += createAsciiReportOfBuildStatsSummariesSingleSet(
+    buildStatsSummariesBinnedBySubdirs.fullBuildStatsSummariesList,
+    "Full Project")
+  binnedBuildStatsSummariesList_dict = \
+    buildStatsSummariesBinnedBySubdirs.binnedBuildStatsSummariesList_dict
+  for subdir in sorted(binnedBuildStatsSummariesList_dict.keys()):
+    asciiReportStr += "\n"
+    asciiReportStr += createAsciiReportOfBuildStatsSummariesSingleSet(
+      binnedBuildStatsSummariesList_dict.get(subdir), subdir )
+  return asciiReportStr
+
+
+#
+# Helper functions for main()
+#
+
+
+#
+# Help message
+#
+
+usageHelp = r"""
+Summarize gathered build stats from the build stats CSV file and print the
+report as ASCII text to STDOUT. This prints a report like:
+
+Full Project: sum(max_resident_size_mb) = ??? (??? entries)
+Full Project: max(max_resident_size_mb) = ??? (<file-name>)
+Full Project: sum(elapsed_real_time_sec) = ??? (??? entries)
+Full Project: max(elapsed_real_time_sec) = ??? (<file-name>)
+Full Project: sum(file_size_mb) = ??? (??? entries)
+Full Project: max(file_size_mb) = ??? (<file-name>)
+
+<subdir>: sum(max_resident_size_mb) = ??? (??? entries)
+<subdir>: max(max_resident_size_mb) = ??? (<file-name>)
+<subdir>: sum(elapsed_real_time_sec) = ??? (??? entries)
+<subdir>: max(elapsed_real_time_sec) = ??? (<file-name>)
+<subdir>: sum(file_size_mb) = ??? (??? entries)
+<subdir>: max(file_size_mb) = ??? (<file-name>)
+
+...
+"""
+
+def injectCmndLineOptionsInParser(clp):
+
+  clp.add_argument(
+    "--bin-by-subdirs-under-dirs", dest="binBySubdirsUnderDirsStr", default="",
+    help="List of base dirs to group results by subdir under."+\
+      " Format '<dir0>,<dir1>,...'"
    )
+
+  clp.add_argument("buildStatsCsvFile",
+    help="The build stats CSV file created by the build wrappers and gathered up." )
+
+
+def getCmndLineOptions():
+  from argparse import ArgumentParser, RawDescriptionHelpFormatter
+  clp = ArgumentParser(description=usageHelp,
+    formatter_class=RawDescriptionHelpFormatter)
+  injectCmndLineOptionsInParser(clp)
+  options = clp.parse_args(sys.argv[1:])
+  if not os.path.exists(options.buildStatsCsvFile):
+    raise Exception(
+      "Error, file '"+options.buildStatsCsvFile+"' does not exist!")
+  return options
+
+
+#
+# Main()
+#
+
+if __name__ == '__main__':
+
+  inOptions = getCmndLineOptions()
+
+  buildStatsDOL = readBuildStatsCsvFileIntoDictOfLists(inOptions.buildStatsCsvFile)
+  if not buildStatsDOL:
+    print("No build statistics to summarize!")
+    sys.exit(0)
+  addStdScaledBuildStatsFields(buildStatsDOL)
+  buildStatsBinnedBySubdirs = binBuildStatsDictOfListsBySubdirUnderDirs(
+    buildStatsDOL, inOptions.binBySubdirsUnderDirsStr.split(',') )
+  buildStatsSummariesBinnedBySubdirs = computeStdBuildStatsSummaries(
+    buildStatsBinnedBySubdirs )
+  buildStatsAsciiReport = createAsciiReportOfBuildStatsSummaries(
+    buildStatsSummariesBinnedBySubdirs )
+
+  print(buildStatsAsciiReport)
diff --git a/trilinos_source15/commonTools/build_stats/unit_tests/CMakeLists.txt b/trilinos_source15/commonTools/build_stats/unit_tests/CMakeLists.txt
new file mode 100644
index 0000000000..07aed0dabe
--- /dev/null
+++ b/trilinos_source15/commonTools/build_stats/unit_tests/CMakeLists.txt
@@ -0,0 +1,17 @@
+TRIBITS_ADD_ADVANCED_TEST( summarize_build_stats_UnitTests
+  OVERALL_WORKING_DIRECTORY TEST_NAME
+  OVERALL_NUM_MPI_PROCS 1
+  TEST_0 CMND ${PYTHON_EXECUTABLE}
+    ARGS ${CMAKE_CURRENT_SOURCE_DIR}/summarize_build_stats_UnitTests.py -v
+    PASS_REGULAR_EXPRESSION "OK"
+    ALWAYS_FAIL_ON_NONZERO_RETURN
+  )
+
+TRIBITS_ADD_ADVANCED_TEST( gather_build_stats_UnitTests
+  OVERALL_WORKING_DIRECTORY TEST_NAME
+  OVERALL_NUM_MPI_PROCS 1
+  TEST_0 CMND ${PYTHON_EXECUTABLE}
+    ARGS ${CMAKE_CURRENT_SOURCE_DIR}/gather_build_stats_UnitTests.py -v
+    PASS_REGULAR_EXPRESSION "OK"
+    ALWAYS_FAIL_ON_NONZERO_RETURN
+  )
diff --git a/trilinos_source15/commonTools/build_stats/unit_tests/bad_timing_build_stats_files/target1.timing.bad_type_filesize b/trilinos_source15/commonTools/build_stats/unit_tests/bad_timing_build_stats_files/target1.timing.bad_type_filesize
new file mode 100755
index 0000000000..60cdc70e7f
--- /dev/null
+++ b/trilinos_source15/commonTools/build_stats/unit_tests/bad_timing_build_stats_files/target1.timing.bad_type_filesize
@@ -0,0 +1,2 @@
+max_resident_size_Kb,elapsed_real_time_sec,num_involuntary_context_switch,FileName,FileSize,num_filesystem_outputs
+240000,3.5,46,./some/base/dir/target1.o,bad size type,20368
diff --git a/trilinos_source15/commonTools/build_stats/unit_tests/bad_timing_build_stats_files/target1.timing.empty b/trilinos_source15/commonTools/build_stats/unit_tests/bad_timing_build_stats_files/target1.timing.empty
new file mode 100755
index 0000000000..e69de29bb2
diff --git a/trilinos_source15/commonTools/build_stats/unit_tests/bad_timing_build_stats_files/target1.timing.junk b/trilinos_source15/commonTools/build_stats/unit_tests/bad_timing_build_stats_files/target1.timing.junk
new file mode 100755
index 0000000000..b0f800bdd0
--- /dev/null
+++ b/trilinos_source15/commonTools/build_stats/unit_tests/bad_timing_build_stats_files/target1.timing.junk
@@ -0,0 +1,3 @@
+blab junk, %$$, *$%
+for this garbage
+what?
diff --git a/trilinos_source15/commonTools/build_stats/unit_tests/bad_timing_build_stats_files/target1.timing.missing_col_filename b/trilinos_source15/commonTools/build_stats/unit_tests/bad_timing_build_stats_files/target1.timing.missing_col_filename new file mode 100755 index 0000000000..b2c16903ec --- /dev/null +++ b/trilinos_source15/commonTools/build_stats/unit_tests/bad_timing_build_stats_files/target1.timing.missing_col_filename @@ -0,0 +1,2 @@ +max_resident_size_Kb,elapsed_real_time_sec,num_involuntary_context_switch,FileSize,num_filesystem_outputs +240000,3.5,46,3300000,20368 diff --git a/trilinos_source15/commonTools/build_stats/unit_tests/bad_timing_build_stats_files/target1.timing.two_data_rows b/trilinos_source15/commonTools/build_stats/unit_tests/bad_timing_build_stats_files/target1.timing.two_data_rows new file mode 100755 index 0000000000..023723d651 --- /dev/null +++ b/trilinos_source15/commonTools/build_stats/unit_tests/bad_timing_build_stats_files/target1.timing.two_data_rows @@ -0,0 +1,3 @@ +max_resident_size_Kb,elapsed_real_time_sec,num_involuntary_context_switch,FileName,FileSize,num_filesystem_outputs +240000,3.5,46,./some/base/dir/target1.o,3300000,20368 +240000,3.5,46,./some/base/dir/target1.o,3300000,20368 diff --git a/trilinos_source15/commonTools/build_stats/unit_tests/build_stats.big.small.csv b/trilinos_source15/commonTools/build_stats/unit_tests/build_stats.big.small.csv new file mode 100755 index 0000000000..6f628468c7 --- /dev/null +++ b/trilinos_source15/commonTools/build_stats/unit_tests/build_stats.big.small.csv @@ -0,0 +1,22 @@ +avg_size_unshared_text_area_Kb,max_resident_size_Kb,cpu_sec_user_mode,elapsed_real_time_sec,num_involuntary_context_switch,FileName,symbol_text_local,FileSize,cpu_sec_kernel_mode,num_filesystem_outputs +0,240000,3.19,3.5,46,./commonTools/gtest/CMakeFiles/gtest.dir/gtest/gtest-all.cc.o,75,3300000,0.39,20368 +0,180000,1.38,1.5,43,packages/thyra/adapters/epetra/example/CMakeFiles/ThyraEpetraAdapters_sillyCgSolve_epetra.dir/createTridiagEpetraLinearOp.cpp.o,9,870000,0.25,5272 +0,190000,1.75,2.1,48,./packages/thyra/adapters/epetra/example/CMakeFiles/ThyraEpetraAdapters_sillyCgSolve_epetra.dir/sillyCgSolve_epetra.cpp.o,9,1480000,0.27,8816 +0,180000,1.35,1.6,39,packages/thyra/adapters/epetra/example/CMakeFiles/ThyraEpetraAdapters_sillyPowerMethod_epetra.dir/createTridiagEpetraLinearOp.cpp.o,9,870000,0.21,5272 +0,200000,1.97,2.2,42,packages/thyra/adapters/epetra/example/CMakeFiles/ThyraEpetraAdapters_sillyPowerMethod_epetra.dir/sillyPowerMethod_epetra.cpp.o,9,1500000,0.21,8928 +0,8000,0,0.0,6,packages/tpetra/classic/LinAlg/CMakeFiles/tpetraclassiclinalg.dir/Kokkos_LinAlgVersion.cpp.o,0,1300,0,24 +0,30000,0.13,0.2,15,packages/tpetra/classic/LinAlg/libtpetraclassiclinalg.so.12.17,0,7600,0.04,16 +0,2400000,0.84,1.0,32,packages/tpetra/classic/NodeAPI/CMakeFiles/tpetraclassicnodeapi.dir/Kokkos_DefaultNode.cpp.o,2,170000,0.15,1128 +0,30000,0.09,0.2,11,./packages/tpetra/classic/NodeAPI/libtpetraclassicnodeapi.so.12.17,2,120000,0.03,248 +0,700000,21.09,22.7,200,packages/rol/adapters/belos/test/vector/CMakeFiles/ROL_adapters_belos_test_vector_BelosInterface.dir/test_01.cpp.o,10,19000000,1.4,126872 +0,300000,2.24,2.5,36,packages/rol/adapters/belos/test/vector/ROL_adapters_belos_test_vector_BelosInterface.exe,10,10000000,0.29,19872 +0,730000,16.15,48.2,197,packages/rol/adapters/epetra/test/sol/CMakeFiles/ROL_adapters_epetra_test_sol_EpetraSROMSampleGenerator.dir/test_02.cpp.o,9,17000000,1.88,109384 
+0,300000,2,2.3,24,packages/rol/adapters/epetra/test/sol/ROL_adapters_epetra_test_sol_EpetraSROMSampleGenerator.exe,9,8400000,0.26,16592 +0,1200000,20.87,23.0,354,packages/panzer/adapters-stk/example/assembly_engine/CMakeFiles/PanzerAdaptersSTK_assembly_example.dir/assembly_driver.cpp.o,13,20100000,2.14,125768 +0,500000,3.01,3.5,46,./packages/panzer/adapters-stk/example/assembly_engine/PanzerAdaptersSTK_assembly_example.exe,13,12000000,0.46,23704 +0,1700000,34.22,37.9,331,packages/panzer/adapters-stk/example/CurlLaplacianExample/CMakeFiles/PanzerAdaptersSTK_CurlLaplacianExample.dir/main.cpp.o,27,45000000,3.53,273192 +0,520000,3.6,4.1,51,packages/panzer/adapters-stk/example/CurlLaplacianExample/PanzerAdaptersSTK_CurlLaplacianExample.exe,27,19000000,0.47,37456 +0,680000,0.44,0.5,26,packages/adelus/src/CMakeFiles/zadelus.dir/Adelus_pcomm.cpp.o,2,380000,0.08,2344 +0,35000,0.12,0.2,20,./packages/adelus/src/CMakeFiles/zadelus.dir/AdelusVersion.cpp.o,0,74000,0.05,528 +0,64000,0.25,0.3,21,./packages/adelus/src/libzadelus.so.12.17,4,360000,0.04,712 +0,77000,0.29,0.4,85,packages/adelus/test/vector_random/Adelus_vector_random.exe,113,5200000,0.09,10280 diff --git a/trilinos_source15/commonTools/build_stats/unit_tests/build_stats.empty.csv b/trilinos_source15/commonTools/build_stats/unit_tests/build_stats.empty.csv new file mode 100644 index 0000000000..8b13789179 --- /dev/null +++ b/trilinos_source15/commonTools/build_stats/unit_tests/build_stats.empty.csv @@ -0,0 +1 @@ + diff --git a/trilinos_source15/commonTools/build_stats/unit_tests/build_stats.incomplete_row.csv b/trilinos_source15/commonTools/build_stats/unit_tests/build_stats.incomplete_row.csv new file mode 100644 index 0000000000..d6d3ba3c11 --- /dev/null +++ b/trilinos_source15/commonTools/build_stats/unit_tests/build_stats.incomplete_row.csv @@ -0,0 +1,5 @@ +avg_size_unshared_text_area_Kb,max_resident_size_Kb,cpu_sec_user_mode,elapsed_real_time_sec,num_involuntary_context_switch,FileName,symbol_text_local,FileSize,cpu_sec_kernel_mode,num_filesystem_outputs +0,240000,3.19,3.5,46,commonTools/gtest/CMakeFiles/gtest.dir/gtest/gtest-all.cc.o,75,3300000,0.39,20368 +0,180000,1.38,1.5,43,packages/thyra/adapters/epetra/example/CMakeFiles/ThyraEpetraAdapters_sillyCgSolve_epetra.dir/createTridiagEpetraLinearOp.cpp.o,9,870000,0.25,5272 +0,64000,0.25,0.3,21 +0,77000,0.29,0.4,85,packages/adelus/test/vector_random/Adelus_vector_random.exe,113,5200000,0.09,10280 diff --git a/trilinos_source15/commonTools/build_stats/unit_tests/gather_build_stats_UnitTests.py b/trilinos_source15/commonTools/build_stats/unit_tests/gather_build_stats_UnitTests.py new file mode 100644 index 0000000000..1b8e95b02a --- /dev/null +++ b/trilinos_source15/commonTools/build_stats/unit_tests/gather_build_stats_UnitTests.py @@ -0,0 +1,361 @@ +# @HEADER +# ************************************************************************ +# +# TriBITS: Tribal Build, Integrate, and Test System +# Copyright 2013 Sandia Corporation +# +# Under the terms of Contract DE-AC04-94AL85000 with Sandia Corporation, +# the U.S. Government retains certain rights in this software. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# +# 2. 
Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in the +# documentation and/or other materials provided with the distribution. +# +# 3. Neither the name of the Corporation nor the names of the +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY SANDIA CORPORATION "AS IS" AND ANY +# EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL SANDIA CORPORATION OR THE +# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, +# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +# +# ************************************************************************ +# @HEADER + +import os +import sys +import copy +import shutil +import unittest +import pprint + +thisScriptsDir = os.path.dirname(os.path.abspath(__file__)) +g_testBaseDir = thisScriptsDir +sys.path = [thisScriptsDir+"/.."] + sys.path +import gather_build_stats as GBS +import FindTribitsCiSupportDir +import GeneralScriptSupport as GSS + +g_pp = pprint.PrettyPrinter(indent=2) + +# Shared test data + +g_listOfDicts = [ + {'field1':'11', 'field2':'12', 'field4':'14'}, + {'field1':'21', 'field2':'22', 'field3':'23', 'field5':"25"}, + ] + + +############################################################################# +# +# Test gather_build_stats.readAllValidTimingFiles() +# +############################################################################# + + +class test_readAllValidTimingFiles(unittest.TestCase): + + def test_1(self): + baseDir = g_testBaseDir+"/dummy_build_dir" + allValidTimingFiles = GBS.readAllValidTimingFiles(baseDir, printErrMsg=False) + allValidTimingFiles_expected = [ + {'FileName': 'target4.o', + 'FileSize': '260000', + 'elapsed_real_time_sec': '1.9', + 'max_resident_size_Kb': '2000'}, + {'FileName': 'packages/pkga/src/target2.lib', + 'FileSize': '870000', + 'cpu_sec_user_mode': '1.38', + 'elapsed_real_time_sec': '1.5', + 'max_resident_size_Kb': '180000'}, + {'FileName': 'some/base/dir/target1.o', + 'FileSize': '3300000', + 'elapsed_real_time_sec': '3.5', + 'max_resident_size_Kb': '240000', + 'num_filesystem_outputs': '20368', + 'num_involuntary_context_switch': '46'}] + # NOTE: The bad timign file 'some/base/target3.timing' was gracefully + # skipped! 
+ allValidTimingFiles.sort(key=lambda item: item.get('FileName')) # Avoid system-dependent behavior + allValidTimingFiles_expected.sort(key=lambda item: item.get('FileName')) + self.assertEqual(allValidTimingFiles, allValidTimingFiles_expected) + + +############################################################################# +# +# Test gather_build_stats.readBuildStatsTimingFileIntoDict() +# +############################################################################# + + +def readBuildStatsTimingFileIntoDictTest(testObj, buildStatsTimingFile, + numKeys_expected, buildStatsTimingDict_expected, errMsg_expected, + ): + (buildStatsTimingDict, errMsg) = GBS.readBuildStatsTimingFileIntoDict( + buildStatsTimingFile) + testObj.assertEqual(errMsg, errMsg_expected) + if numKeys_expected > 0: + testObj.assertEqual(len(buildStatsTimingDict.keys()), numKeys_expected) + testObj.assertEqual(buildStatsTimingDict, buildStatsTimingDict_expected) + + +class test_readBuildStatsTimingFileIntoDict(unittest.TestCase): + + def test_correct(self): + buildStatsTimingFile = \ + g_testBaseDir+"/dummy_build_dir/some/base/dir/target1.timing" + numKeys_expected = 6 + buildStatsTimingDict_expected = { + 'FileName': 'some/base/dir/target1.o', + 'FileSize': '3300000', + 'elapsed_real_time_sec': '3.5', + 'max_resident_size_Kb': '240000', + 'num_filesystem_outputs': '20368', + 'num_involuntary_context_switch': '46', + } + errMsg_expected = "" + readBuildStatsTimingFileIntoDictTest(self, buildStatsTimingFile, + numKeys_expected, buildStatsTimingDict_expected, errMsg_expected) + + def test_missing_fail(self): + buildStatsTimingFile = \ + g_testBaseDir+"/file_does_not_exist.timing" + numKeys_expected = 0 + buildStatsTimingDict_expected = None + errMsg_expected = buildStatsTimingFile+": ERROR: File does not exist!" + readBuildStatsTimingFileIntoDictTest(self, buildStatsTimingFile, + numKeys_expected, buildStatsTimingDict_expected, errMsg_expected) + + def test_two_data_rows_fail(self): + buildStatsTimingFile = \ + g_testBaseDir+"/bad_timing_build_stats_files/target1.timing.two_data_rows" + numKeys_expected = 0 + buildStatsTimingDict_expected = None + errMsg_expected = buildStatsTimingFile+": ERROR: Contains 2 != 1 data rows!" + readBuildStatsTimingFileIntoDictTest(self, buildStatsTimingFile, + numKeys_expected, buildStatsTimingDict_expected, errMsg_expected) + + def test_empty_fail(self): + buildStatsTimingFile = \ + g_testBaseDir+"/bad_timing_build_stats_files/target1.timing.empty" + numKeys_expected = 0 + buildStatsTimingDict_expected = None + errMsg_expected = buildStatsTimingFile+": ERROR: File is empty!" + readBuildStatsTimingFileIntoDictTest(self, buildStatsTimingFile, + numKeys_expected, buildStatsTimingDict_expected, errMsg_expected) + + def test_junk_fail(self): + buildStatsTimingFile = \ + g_testBaseDir+"/bad_timing_build_stats_files/target1.timing.junk" + numKeys_expected = 0 + buildStatsTimingDict_expected = None + errMsg_expected = buildStatsTimingFile+": ERROR: Error, for CSV file"+\ + " '"+buildStatsTimingFile+"' the data row 0 ['for this garbage'] has 1 entries"+\ + " which does not macth the number of column headers 3!" + readBuildStatsTimingFileIntoDictTest(self, buildStatsTimingFile, + numKeys_expected, buildStatsTimingDict_expected, errMsg_expected) + # NOTE: The above test is very much tied to the implementation of + # readCsvFileIntoListOfDicts() for the error message it puts out. 
That is + # very + + def test_missing_col_filename_fail(self): + buildStatsTimingFile = \ + g_testBaseDir+"/bad_timing_build_stats_files/target1.timing.missing_col_filename" + numKeys_expected = 0 + buildStatsTimingDict_expected = None + errMsg_expected = \ + buildStatsTimingFile+": ERROR: The required field 'FileName' is missing!" + readBuildStatsTimingFileIntoDictTest(self, buildStatsTimingFile, + numKeys_expected, buildStatsTimingDict_expected, errMsg_expected) + + def test_bad_type_filesize_fail(self): + buildStatsTimingFile = \ + g_testBaseDir+"/bad_timing_build_stats_files/target1.timing.bad_type_filesize" + numKeys_expected = 0 + buildStatsTimingDict_expected = None + errMsg_expected = \ + buildStatsTimingFile+": ERROR: For field 'FileSize' the string value"+\ + " 'bad size type' could not be converted to the expected type 'float'!" + readBuildStatsTimingFileIntoDictTest(self, buildStatsTimingFile, + numKeys_expected, buildStatsTimingDict_expected, errMsg_expected) + + +############################################################################# +# +# Test gather_build_stats.writeDictOfListsToCsvFile() +# +############################################################################# + +class test_writeDictOfListsToCsvFile(unittest.TestCase): + + def test_1(self): + dictOfLists = GBS.getDictOfListsFromListOfDicts(g_listOfDicts) + csvFile = "test_writeDictOfListsToCsvFile_build_stats.csv" + csvFileText_expected = \ + "field1,field2,field3,field4,field5\n11,12,,14,\n21,22,23,,25\n" + GBS.writeDictOfListsToCsvFile(dictOfLists, csvFile) + with open(csvFile, 'r') as csvFileHandle: + csvFileText = csvFileHandle.read() + self.assertEqual(csvFileText, csvFileText_expected) + + +############################################################################# +# +# Test gather_build_stats.getListOfAllTimingFiles() +# +############################################################################# + + +class test_getListOfAllTimingFiles(unittest.TestCase): + + def test_1(self): + baseDir = g_testBaseDir+"/dummy_build_dir" + listOfAllTimingFiles = GBS.getListOfAllTimingFiles(baseDir) + listOfAllTimingFiles_expected = [ + 'packages/pkga/src/target2.timing', + 'some/base/dir/target1.timing', + 'some/base/target3.timing', + 'target4.timing', + ] + listOfAllTimingFiles.sort() # Avoid system-dependent behavior + listOfAllTimingFiles_expected.sort() + self.assertEqual(listOfAllTimingFiles, listOfAllTimingFiles_expected) + + +############################################################################# +# +# Test gather_build_stats.getDictOfListsFromListOfDicts() +# +############################################################################# + +class test_getDictOfListsFromListOfDicts(unittest.TestCase): + + def test_1(self): + dictOfLists = GBS.getDictOfListsFromListOfDicts(g_listOfDicts) + dictOfLists_expected = { + 'field1': ['11', '21'], + 'field2': ['12', '22'], + 'field3': ['', '23'], + 'field4': ['14', ''], + 'field5': ['', '25'], + } + self.assertEqual(dictOfLists, dictOfLists_expected) + + +############################################################################# +# +# Test gather_build_stats.getSupersetOfFieldNamesList() +# +############################################################################# + +class test_getSupersetOfFieldNamesList(unittest.TestCase): + + def test_1(self): + supersetOfFieldNamesList = GBS.getSupersetOfFieldNamesList(g_listOfDicts) + supersetOfFieldNamesList_expected = \ + ['field1', 'field2', 'field3', 'field4', 'field5'] + supersetOfFieldNamesList.sort() # Make 
system independent + supersetOfFieldNamesList_expected.sort() + self.assertEqual(supersetOfFieldNamesList, supersetOfFieldNamesList_expected) + + + +############################################################################# +# +# Test gather_build_stats.py +# +############################################################################# + + +csvFileText_expected = \ + "FileName,FileSize,cpu_sec_user_mode,elapsed_real_time_sec,max_resident_size_Kb,num_filesystem_outputs,num_involuntary_context_switch\n"+\ + "target4.o,260000,,1.9,2000,,\n"+\ + "some/base/dir/target1.o,3300000,,3.5,240000,20368,46\n"+\ + "packages/pkga/src/target2.lib,870000,1.38,1.5,180000,,\n" + + +def gather_build_stats_py_expected_output(csvFile): + return \ + "Reading all *.timing files from under '"+g_testBaseDir+"/dummy_build_dir' ...\n"+\ + "Number of *.timing files found = 4\n"+\ + g_testBaseDir+"/dummy_build_dir/some/base/target3.timing: ERROR: Contains 0 != 1 data rows!\n"+\ + "Number of valid *.timing files found = 3\n"+\ + "Combined build-stats keys sorted:\n"+\ + " ['FileName', 'FileSize', 'cpu_sec_user_mode', 'elapsed_real_time_sec', 'max_resident_size_Kb', 'num_filesystem_outputs', 'num_involuntary_context_switch']\n"+\ + "Wrote file '"+csvFile+"'\n" + + +def sortCsvFileTextList(csvFileText): + csvFileTextList_orig = csvFileText.split('\n') + csvFileTextList = [] + csvFileTextList.append(csvFileTextList_orig[0]) # Headers + csvFileTextList.extend(sorted(csvFileTextList_orig[1:])) # Rows + return csvFileTextList + + +def test_gather_build_stats_py_body(testObj, csvFile, cmnd, silentStdout=False): + output = GSS.getCmndOutput(cmnd) + #print("output:\n"+output) + with open(csvFile, 'r') as csvFileHandle: + csvFileText = csvFileHandle.read() + testObj.assertEqual( + sortCsvFileTextList(csvFileText), + sortCsvFileTextList(csvFileText_expected)) + if not silentStdout: + testObj.assertEqual( + output.split('\n'), + gather_build_stats_py_expected_output(csvFile).split('\n')) + + +class test_gather_build_stats_py(unittest.TestCase): + + def test_help(self): + cmnd = thisScriptsDir+"/../gather_build_stats.py --help" + output = GSS.getCmndOutput(cmnd) + #print("output:\n"+output+"\n") + self.assertTrue(output.find("Gather up build stats from *.timing CSV files")!=-1) + self.assertTrue(output.find("max_resident_size_Kb : float")!=-1) + self.assertTrue(output.find("FileName : string")!=-1) + self.assertTrue(output.find("The column headers in all of the *.timing files are combined")!=-1) + + def test_default_out_file(self): + csvFile = "build_stats.csv" + cmnd = thisScriptsDir+"/../gather_build_stats.py"+\ + " -d "+g_testBaseDir+"/dummy_build_dir" + test_gather_build_stats_py_body(self, csvFile, cmnd, silentStdout=True) + + def test_default_out_file_verbose(self): + csvFile = "build_stats.csv" + cmnd = thisScriptsDir+"/../gather_build_stats.py -v"+\ + " -d "+g_testBaseDir+"/dummy_build_dir" + test_gather_build_stats_py_body(self, csvFile, cmnd) + + def test_explicit_out_file_verbose(self): + csvFile = "test_gather_build_stats_py_build_stats.csv" + cmnd = thisScriptsDir+"/../gather_build_stats.py -v"+\ + " -d "+g_testBaseDir+"/dummy_build_dir "+csvFile + test_gather_build_stats_py_body(self, csvFile, cmnd) + + +# +# Run the unit tests! 
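+#
+# These tests can be run directly (matching how unit_tests/CMakeLists.txt
+# invokes them):
+#
+#   python gather_build_stats_UnitTests.py -v
+#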
+# + +if __name__ == '__main__': + + unittest.main() diff --git a/trilinos_source15/commonTools/build_stats/unit_tests/summarize_build_stats_UnitTests.py b/trilinos_source15/commonTools/build_stats/unit_tests/summarize_build_stats_UnitTests.py new file mode 100644 index 0000000000..45e711abb8 --- /dev/null +++ b/trilinos_source15/commonTools/build_stats/unit_tests/summarize_build_stats_UnitTests.py @@ -0,0 +1,730 @@ +# @HEADER +# ************************************************************************ +# +# TriBITS: Tribal Build, Integrate, and Test System +# Copyright 2013 Sandia Corporation +# +# Under the terms of Contract DE-AC04-94AL85000 with Sandia Corporation, +# the U.S. Government retains certain rights in this software. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in the +# documentation and/or other materials provided with the distribution. +# +# 3. Neither the name of the Corporation nor the names of the +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY SANDIA CORPORATION "AS IS" AND ANY +# EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL SANDIA CORPORATION OR THE +# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, +# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +# +# ************************************************************************ +# @HEADER + +import os +import sys +import copy +import shutil +import unittest +import pprint +from decimal import Decimal + +thisScriptsDir = os.path.dirname(os.path.abspath(__file__)) +g_testBaseDir = thisScriptsDir +sys.path = [thisScriptsDir+"/.."] + sys.path +import summarize_build_stats as SBS +import FindTribitsCiSupportDir +import GeneralScriptSupport as GSS + +g_pp = pprint.PrettyPrinter(indent=2) + + +# Get a copied dict of lists of build stats read from input file +# +def getBuildStatsForTests(computeStdScaledFields=True): + global g_buildStatsDOL + if not g_buildStatsDOL: + g_buildStatsDOL = SBS.readBuildStatsCsvFileIntoDictOfLists( + g_testBaseDir+"/build_stats.big.small.csv", + computeStdScaledFields=computeStdScaledFields, + ) + return copy.deepcopy(g_buildStatsDOL) + +g_buildStatsDOL = None + +# Note: It is structured this way above because we don't want the unit test +# for the function readBuildStatsCsvFileIntoDictOfLists() to be unable to run if a defect is added to it. 
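+# For example (illustrative): two tests that each call getBuildStatsForTests()
+# receive independent deep copies, so in-place updates like
+# SBS.addStdScaledBuildStatsFields() in one test cannot leak into another.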
+
+
+#############################################################################
+#
+# Test summarize_build_stats.readBuildStatsCsvFileIntoDictOfLists()
+#
+#############################################################################


+class test_readBuildStatsCsvFileIntoDictOfLists(unittest.TestCase):
+
+  def test_build_stats_big_little(self):
+    buildStatsDOL = SBS.readBuildStatsCsvFileIntoDictOfLists(
+      g_testBaseDir+"/build_stats.big.small.csv", computeStdScaledFields=False )
+    numCols_expected = 4
+    numRows_expected = 21
+    self.assertEqual(len(buildStatsDOL.keys()), numCols_expected)
+    self.assertEqual(len(buildStatsDOL['max_resident_size_Kb']), numRows_expected)
+    self.assertEqual(len(buildStatsDOL['elapsed_real_time_sec']), numRows_expected)
+    self.assertEqual(len(buildStatsDOL['FileName']), numRows_expected)
+    self.assertEqual(len(buildStatsDOL['FileSize']), numRows_expected)
+    self.assertEqual(buildStatsDOL['max_resident_size_Kb'][0], 240000)
+    self.assertEqual(buildStatsDOL['max_resident_size_Kb'][11], 730000)
+    self.assertEqual(buildStatsDOL['max_resident_size_Kb'][20], 77000)
+    self.assertEqual(buildStatsDOL['elapsed_real_time_sec'][0], 3.5)
+    self.assertEqual(buildStatsDOL['elapsed_real_time_sec'][11], 48.2)
+    self.assertEqual(buildStatsDOL['elapsed_real_time_sec'][20], 0.4)
+    self.assertEqual(buildStatsDOL['FileName'][0],
+      "commonTools/gtest/CMakeFiles/gtest.dir/gtest/gtest-all.cc.o")
+    self.assertEqual(buildStatsDOL['FileName'][11],
+      "packages/rol/adapters/epetra/test/sol/CMakeFiles/ROL_adapters_epetra_test_sol_EpetraSROMSampleGenerator.dir/test_02.cpp.o")
+    self.assertEqual(buildStatsDOL['FileName'][20],
+      "packages/adelus/test/vector_random/Adelus_vector_random.exe")
+    self.assertEqual(buildStatsDOL['FileSize'][0], 3300000)
+    self.assertEqual(buildStatsDOL['FileSize'][11], 17000000)
+    self.assertEqual(buildStatsDOL['FileSize'][20], 5200000)
+
+# NOTE: The above tests also indirectly test
+# summarize_build_stats.readCsvFileIntoDictOfLists()!
+
+
+#############################################################################
+#
+# Test summarize_build_stats.readCsvFileIntoDictOfLists()
+#
+#############################################################################
+
+
+def cnat(colName, colType="string"):
+  return SBS.ColNameAndType(colName, colType)
+
+
+class test_readCsvFileIntoDictOfLists(unittest.TestCase):
+
+  def test_invalid_col_header(self):
+    threwExcept = True
+    try:
+      buildStatsDOL = SBS.readCsvFileIntoDictOfLists(
+        g_testBaseDir+"/build_stats.big.small.csv",
+        [
+          cnat('missing_header', 'float'),
+        ]
+        )
+      threwExcept = False
+    except Exception as exceptObj:
+      errMsg = str(exceptObj)
+      subStrIdx = errMsg.find(
+        "Error, the CSV file column header 'missing_header' does not exist")
+      self.assertEqual(subStrIdx, 0)
+      # ToDo: Do a better match with fail msg above
+    if not threwExcept:
+      self.assertFalse("ERROR: Did not throw an exception!")
+
+  def test_row_missing_elements(self):
+    threwExcept = True
+    try:
+      buildStatsDOL = SBS.readCsvFileIntoDictOfLists(
+        g_testBaseDir+"/build_stats.incomplete_row.csv",
+        [
+          cnat('max_resident_size_Kb', 'float'),
+          cnat('elapsed_real_time_sec', 'float'),
+        ]
+        )
+      threwExcept = False
+    except Exception as exceptObj:
+      errMsg = str(exceptObj)
+      errMsgSubStrExpected = \
+        "build_stats.incomplete_row.csv' has 10 column headers but data row 2"+\
+        " only has 5 entries"
+      subStrIdx = errMsg.find(errMsgSubStrExpected)
+      self.assertNotEqual(subStrIdx, -1)
+      # ToDo: Do a better match with fail msg above
+    if not threwExcept:
+      self.assertFalse("ERROR: Did not throw an exception!")
+
+
+#############################################################################
+#
+# Test summarize_build_stats.getColNameTypeIdxListGivenColNameAndTypeList()
+#
+#############################################################################
+
+
+def cnti(colName, colType, colIdx):
+  return SBS.ColNameTypeIdx(SBS.ColNameAndType(colName, colType), colIdx)
+
+
+class test_getColNameTypeIdxListGivenColNameAndTypeList(unittest.TestCase):
+
+  def test_subset(self):
+    colNameAndIdxList = SBS.getColNameTypeIdxListGivenColNameAndTypeList(
+      "dummyFileName",
+      [ "aaa", "bbb", "ccc", "ddd", "eee", "fff", "ggg", "hhh" ],
+      [ cnat("bbb",'int'), cnat("ccc",'float'), cnat("fff",'string') ] )
+    colNameAndIdxList_expected = [
+      cnti("bbb",'int',1), cnti("ccc",'float',2), cnti("fff",'string',5) ]
+    self.assertEqual(colNameAndIdxList, colNameAndIdxList_expected)
+
+  def test_missing_col_header(self):
+    threwExcept = True
+    try:
+      colNameAndIdxList = SBS.getColNameTypeIdxListGivenColNameAndTypeList(
+        "dummyFileName",
+        [ "aaa", "bbb", "ccc", "ddd", "eee", "fff", "ggg", "hhh" ],
+        [ cnat("bbb"), cnat("ccc"), cnat("ffg", "int") ] )
+      threwExcept = False
+    except Exception as errMsg:
+      self.assertEqual( str(errMsg),
+        "Error, the CSV file column header 'ffg' does not exist in the list"+\
+        " of column headers ['aaa', 'bbb', 'ccc', 'ddd', 'eee', 'fff', 'ggg', 'hhh']"+\
+        " from the CSV file 'dummyFileName'!" )
+    if not threwExcept:
+      self.assertFalse("ERROR: Did not throw an exception!")
+
+
+
+#############################################################################
+#
+# Test summarize_build_stats.ColNameTypeIdx
+#
+#############################################################################
+
+class test_ColNameTypeIdx(unittest.TestCase):
+
+  def test_float(self):
+    colNameTypeIdx = SBS.ColNameTypeIdx(SBS.ColNameAndType("name", "float"), 5)
+    asStr = str(colNameTypeIdx)
+    self.assertEqual(asStr, "ColNameTypeIdx{ColNameAndType{name,float},5}")
+    self.assertEqual(colNameTypeIdx.colName(), "name")
+    self.assertEqual(colNameTypeIdx.getIdx(), 5)
+    self.assertEqual(colNameTypeIdx.convertFromStr("10.5"), 10.5)
+
+  def test_int(self):
+    colNameTypeIdx = SBS.ColNameTypeIdx(SBS.ColNameAndType("name", "int"), 4)
+    asStr = str(colNameTypeIdx)
+    self.assertEqual(asStr, "ColNameTypeIdx{ColNameAndType{name,int},4}")
+    self.assertEqual(colNameTypeIdx.colName(), "name")
+    self.assertEqual(colNameTypeIdx.getIdx(), 4)
+    self.assertEqual(colNameTypeIdx.convertFromStr("12"), 12)
+
+  def test_string(self):
+    colNameTypeIdx = SBS.ColNameTypeIdx(SBS.ColNameAndType("name", "string"), 3)
+    asStr = str(colNameTypeIdx)
+    self.assertEqual(asStr, "ColNameTypeIdx{ColNameAndType{name,string},3}")
+    self.assertEqual(colNameTypeIdx.colName(), "name")
+    self.assertEqual(colNameTypeIdx.getIdx(), 3)
+    self.assertEqual(colNameTypeIdx.convertFromStr("some str"), "some str")
+
+  def test_invalid_type(self):
+    threwExcept = True
+    try:
+      colNameTypeIdx = SBS.ColNameTypeIdx(SBS.ColNameAndType("name", "invalid"), 2)
+      threwExcept = False
+    except Exception as errMsg:
+      self.assertEqual( str(errMsg),
+        "Error, type 'invalid' is not supported! Supported types include"+\
+        " ['string', 'int', 'float']!"
) + if not threwExcept: + self.assertFalse("ERROR: Did not thown an excpetion") + + +############################################################################# +# +# Test summarize_build_stats.addStdScaledBuildStatsFields() +# +############################################################################# + +class test_addStdScaledBuildStatsFields(unittest.TestCase): + + def test_read_in_and_create_new_fields(self): + buildStatsDOL = SBS.readBuildStatsCsvFileIntoDictOfLists( + g_testBaseDir+"/build_stats.big.small.csv" ) + SBS.addStdScaledBuildStatsFields(buildStatsDOL) + self.assertEqual(len(buildStatsDOL), 6) + self.assertEqual(len(buildStatsDOL['max_resident_size_mb']), 21) + self.assertEqual(len(buildStatsDOL['file_size_mb']), 21) + self.assertEqual(buildStatsDOL['max_resident_size_mb'][0], 234.38) + self.assertEqual(buildStatsDOL['max_resident_size_mb'][11], 712.89) + self.assertEqual(buildStatsDOL['max_resident_size_mb'][20], 75.20) + self.assertEqual(buildStatsDOL['file_size_mb'][0], 3.15) + self.assertEqual(buildStatsDOL['file_size_mb'][11], 16.21) + self.assertEqual(buildStatsDOL['file_size_mb'][20], 4.96) + + +############################################################################# +# +# Test summarize_build_stats.addNewFieldByScalingExistingField() +# +############################################################################# + +class test_addNewFieldByScalingExistingField(unittest.TestCase): + + def test_add_field_1(self): + dictOfLists = { 'field_1' : [ 1.1, 2.2, 3.3 ] } + SBS.addNewFieldByScalingExistingField(dictOfLists, 'field_1', 0.1, 2, + 'scaled_field') + self.assertEqual(len(dictOfLists.keys()), 2) + self.assertEqual(dictOfLists['field_1'], [ 1.1, 2.2, 3.3 ]) + self.assertEqual(dictOfLists['scaled_field'], [ 0.11, 0.22, 0.33 ]) + + +############################################################################# +# +# Test summarize_build_stats.binBuildStatsDictOfListsBySubdirUnderDirs() +# +############################################################################# + + +dummy1BuildStatsDOL = { + 'field1' : [ "00", "01", "02", "03", "04" ], + 'FileName' : [ + "basedir/pkg0/some_file0", + "basedir/pkg1/some_file1", + "basedir/pkg0/some_file2", + "basedir/pkg1/some_file3", + "basedir/pkg2/some_file4", + ], + 'field2' : [ "10", "11", "12", "13", "14" ], + } + +binnedDummy1BuildStatsDOL_dict = { + "pkg0": { + 'field1' : [ "00", "02" ], + 'FileName' : [ + "basedir/pkg0/some_file0", + "basedir/pkg0/some_file2", + ], + 'field2' : [ "10", "12" ], + }, + "pkg1" : { + 'field1' : [ "01", "03" ], + 'FileName' : [ + "basedir/pkg1/some_file1", + "basedir/pkg1/some_file3", + ], + 'field2' : [ "11", "13" ], + }, + "pkg2" : { + 'field1' : [ "04" ], + 'FileName' : [ + "basedir/pkg2/some_file4", + ], + 'field2' : [ "14" ], + }, + } + +dummy2BuildStatsDOL = { + 'field1' : [ "00", "01", "02", "03", "04" ], + 'FileName' : [ + "dir2/pkg0/some_file0", + "basedir/pkg1/some_file1", + "dir2/pkg0/some_file2", + "basedir/pkg1/some_file3", + "basedir/pkg2/some_file4", + ], + 'field2' : [ "10", "11", "12", "13", "14" ], + } + +binnedDummy2BuildStatsDOL_dict = { + "pkg0": { + 'field1' : [ "00", "02" ], + 'FileName' : [ + "dir2/pkg0/some_file0", + "dir2/pkg0/some_file2", + ], + 'field2' : [ "10", "12" ], + }, + "pkg1" : { + 'field1' : [ "01", "03" ], + 'FileName' : [ + "basedir/pkg1/some_file1", + "basedir/pkg1/some_file3", + ], + 'field2' : [ "11", "13" ], + }, + "pkg2" : { + 'field1' : [ "04" ], + 'FileName' : [ + "basedir/pkg2/some_file4", + ], + 'field2' : [ "14" ], + }, + } + + +class 
test_binBuildStatsDictOfListsBySubdirUnderDirs(unittest.TestCase): + + def test_1(self): + buildStatsDOL = dummy1BuildStatsDOL + buildStatsBinnedBySubdirs = SBS.binBuildStatsDictOfListsBySubdirUnderDirs( + buildStatsDOL, [ "basedir" ] ) + self.assertEqual(buildStatsBinnedBySubdirs.fullBuildStatsDOL, dummy1BuildStatsDOL) + binnedBuildStatsDOL_dict = buildStatsBinnedBySubdirs.binnedBuildStatsDOL_dict + self.assertEqual(len(binnedBuildStatsDOL_dict.keys()), 3) + self.assertEqual(binnedBuildStatsDOL_dict, binnedDummy1BuildStatsDOL_dict) + + def test_2(self): + buildStatsDOL = dummy2BuildStatsDOL + buildStatsBinnedBySubdirs = SBS.binBuildStatsDictOfListsBySubdirUnderDirs( + buildStatsDOL, [ "basedir", "dir2" ] ) + self.assertEqual(buildStatsBinnedBySubdirs.fullBuildStatsDOL, dummy2BuildStatsDOL) + binnedBuildStatsDOL_dict = buildStatsBinnedBySubdirs.binnedBuildStatsDOL_dict + self.assertEqual(len(binnedBuildStatsDOL_dict.keys()), 3) + self.assertEqual(binnedBuildStatsDOL_dict, binnedDummy2BuildStatsDOL_dict) + + +############################################################################# +# +# Test summarize_build_stats.computeBuildStatusSummaryForOneField() +# +############################################################################# + +class test_computeBuildStatusSummaryForOneField(unittest.TestCase): + + def test_field_1(self): + buildStatsDOL = getBuildStatsForTests() + buildStatSummary = \ + SBS.computeBuildStatusSummaryForOneField(buildStatsDOL, 'max_resident_size_mb', 2) + self.assertEqual(buildStatSummary.fieldName, 'max_resident_size_mb') + self.assertEqual(buildStatSummary.numValues, 21) + self.assertEqual(buildStatSummary.sumValue, 10023.45) + self.assertEqual(buildStatSummary.maxValue, 2400000/1024.0) + self.assertEqual(buildStatSummary.maxFileName, + 'packages/tpetra/classic/NodeAPI/CMakeFiles/tpetraclassicnodeapi.dir/Kokkos_DefaultNode.cpp.o' ) + + +############################################################################# +# +# Test summarize_build_stats.computeStdBuildStatsSummariesSingleDOL() +# +############################################################################# + + +class test_computeStdBuildStatsSummariesSingleDOL(unittest.TestCase): + + def test_big_small(self): + buildStatsDOL = getBuildStatsForTests() + bssl = SBS.computeStdBuildStatsSummariesSingleDOL(buildStatsDOL) + self.assertEqual(len(bssl), 3) + self.assertEqual(bssl[0].fieldName, 'max_resident_size_mb') + self.assertEqual(bssl[0].numValues, 21) + self.assertEqual(bssl[0].sumValue, 10023.45) + self.assertEqual(bssl[0].maxValue, 2400000/1024.0) + self.assertEqual(bssl[0].maxFileName, + 'packages/tpetra/classic/NodeAPI/CMakeFiles/tpetraclassicnodeapi.dir/Kokkos_DefaultNode.cpp.o' ) + self.assertEqual(bssl[1].fieldName, 'elapsed_real_time_sec') + self.assertEqual(bssl[1].numValues, 21) + self.assertEqual(bssl[1].sumValue, 157.9) + self.assertEqual(bssl[1].maxValue, 48.2) + self.assertEqual(bssl[1].maxFileName, + 'packages/rol/adapters/epetra/test/sol/CMakeFiles/ROL_adapters_epetra_test_sol_EpetraSROMSampleGenerator.dir/test_02.cpp.o' ) + self.assertEqual(bssl[2].fieldName, 'file_size_mb') + self.assertEqual(bssl[2].numValues, 21) + self.assertEqual(bssl[2].sumValue, 157.19) + self.assertEqual(bssl[2].maxValue, SBS.roundNum(45000000/(1024.0*1024.0),2)) + self.assertEqual(bssl[2].maxFileName, + 'packages/panzer/adapters-stk/example/CurlLaplacianExample/CMakeFiles/PanzerAdaptersSTK_CurlLaplacianExample.dir/main.cpp.o' ) + # NOTE: Above is a white-box test and we want to validate the order as that + # 
+    # is also the order these stats will be displayed.
+
+
+#############################################################################
+#
+# Test summarize_build_stats.computeStdBuildStatsSummaries()
+#
+#############################################################################
+
+class test_computeStdBuildStatsSummaries(unittest.TestCase):
+
+  def test_big_small(self):
+    buildStatsDOL = getBuildStatsForTests()
+    buildStatsBinnedBySubdirs = SBS.binBuildStatsDictOfListsBySubdirUnderDirs(
+      buildStatsDOL, [ "commonTools", "packages" ] )
+    #print("\nbuildStatsBinnedBySubdirs.fullBuildStatsDOL:")
+    #g_pp.pprint(buildStatsBinnedBySubdirs.fullBuildStatsDOL)
+    #print("buildStatsBinnedBySubdirs.binnedBuildStatsDOL_dict:")
+    #g_pp.pprint(buildStatsBinnedBySubdirs.binnedBuildStatsDOL_dict)
+    buildStatsSummariesBinnedBySubdirs = SBS.computeStdBuildStatsSummaries(
+      buildStatsBinnedBySubdirs )
+    # Full project build stats
+    bssl = buildStatsSummariesBinnedBySubdirs.fullBuildStatsSummariesList
+    self.assertEqual(len(bssl), 3)
+    self.assertEqual(bssl[0].fieldName, 'max_resident_size_mb')
+    self.assertEqual(bssl[0].numValues, 21)
+    self.assertEqual(bssl[0].sumValue, 10023.45)
+    self.assertEqual(bssl[0].maxValue, 2400000/1024.0)
+    self.assertEqual(bssl[0].maxFileName,
+      'packages/tpetra/classic/NodeAPI/CMakeFiles/tpetraclassicnodeapi.dir/Kokkos_DefaultNode.cpp.o' )
+    self.assertEqual(bssl[1].fieldName, 'elapsed_real_time_sec')
+    self.assertEqual(bssl[1].numValues, 21)
+    self.assertEqual(bssl[1].sumValue, 157.9)
+    self.assertEqual(bssl[1].maxValue, 48.2)
+    self.assertEqual(bssl[1].maxFileName,
+      'packages/rol/adapters/epetra/test/sol/CMakeFiles/ROL_adapters_epetra_test_sol_EpetraSROMSampleGenerator.dir/test_02.cpp.o' )
+    self.assertEqual(bssl[2].fieldName, 'file_size_mb')
+    self.assertEqual(bssl[2].numValues, 21)
+    self.assertEqual(bssl[2].sumValue, 157.19)
+    self.assertEqual(bssl[2].maxValue, SBS.roundNum(45000000/(1024.0*1024.0),2))
+    self.assertEqual(bssl[2].maxFileName,
+      'packages/panzer/adapters-stk/example/CurlLaplacianExample/CMakeFiles/PanzerAdaptersSTK_CurlLaplacianExample.dir/main.cpp.o' )
+    # Verify number of build stats summaries binned by subdirs
+    self.assertEqual(
+      len(buildStatsSummariesBinnedBySubdirs.binnedBuildStatsSummariesList_dict.keys()),
+      6)
+    self.assertEqual(
+      sorted(buildStatsSummariesBinnedBySubdirs.binnedBuildStatsSummariesList_dict.keys()),
+      ['adelus', 'gtest', 'panzer', 'rol', 'thyra', 'tpetra'])
+    # Build stats for 'adelus'
+    bssl = buildStatsSummariesBinnedBySubdirs.binnedBuildStatsSummariesList_dict['adelus']
+    #print("\nbssl[0]:"); g_pp.pprint(str(bssl[0]))
+    self.assertEqual(len(bssl), 3)
+    self.assertEqual(bssl[0].fieldName, 'max_resident_size_mb')
+    self.assertEqual(bssl[0].numValues, 4)
+    self.assertEqual(bssl[0].sumValue, SBS.roundNum((680000+35000+64000+77000)/1024.0,2))
+    self.assertEqual(bssl[0].maxValue, SBS.roundNum(680000/1024.0,2))
+    self.assertEqual(bssl[0].maxFileName, 'packages/adelus/src/CMakeFiles/zadelus.dir/Adelus_pcomm.cpp.o')
+    #print("\nbssl[1]:"); g_pp.pprint(str(bssl[1]))
+    self.assertEqual(bssl[1].fieldName, 'elapsed_real_time_sec')
+    self.assertEqual(bssl[1].numValues, 4)
+    self.assertEqual(bssl[1].sumValue, SBS.roundNum(0.5+0.2+0.3+0.4,2))
+    self.assertEqual(bssl[1].maxValue, 0.5)
+    self.assertEqual(bssl[1].maxFileName, 'packages/adelus/src/CMakeFiles/zadelus.dir/Adelus_pcomm.cpp.o')
+    self.assertEqual(bssl[2].fieldName, 'file_size_mb')
+    self.assertEqual(bssl[2].numValues, 4)
+    self.assertEqual(bssl[2].sumValue, 5.73)
+    self.assertEqual(bssl[2].maxValue, SBS.roundNum(5200000/(1024.0*1024.0),2))
+    self.assertEqual(bssl[2].maxFileName, 'packages/adelus/test/vector_random/Adelus_vector_random.exe')
+    # Build stats for 'gtest'
+    bssl = buildStatsSummariesBinnedBySubdirs.binnedBuildStatsSummariesList_dict['gtest']
+    #print("\nbssl[0]:"); g_pp.pprint(str(bssl[0]))
+    self.assertEqual(len(bssl), 3)
+    self.assertEqual(bssl[0].fieldName, 'max_resident_size_mb')
+    self.assertEqual(bssl[0].numValues, 1)
+    self.assertEqual(bssl[0].sumValue, SBS.roundNum(240000/1024.0,2))
+    self.assertEqual(bssl[0].maxValue, SBS.roundNum(240000/1024.0,2))
+    self.assertEqual(bssl[0].maxFileName, 'commonTools/gtest/CMakeFiles/gtest.dir/gtest/gtest-all.cc.o')
+    self.assertEqual(bssl[1].fieldName, 'elapsed_real_time_sec')
+    self.assertEqual(bssl[1].numValues, 1)
+    self.assertEqual(bssl[1].sumValue, 3.5)
+    self.assertEqual(bssl[1].maxValue, 3.5)
+    self.assertEqual(bssl[1].maxFileName, 'commonTools/gtest/CMakeFiles/gtest.dir/gtest/gtest-all.cc.o')
+    self.assertEqual(bssl[2].fieldName, 'file_size_mb')
+    self.assertEqual(bssl[2].numValues, 1)
+    self.assertEqual(bssl[2].sumValue, SBS.roundNum(3300000/(1024.0*1024.0),2))
+    self.assertEqual(bssl[2].maxValue, SBS.roundNum(3300000/(1024.0*1024.0),2))
+    self.assertEqual(bssl[2].maxFileName, 'commonTools/gtest/CMakeFiles/gtest.dir/gtest/gtest-all.cc.o')
+    # Build stats for 'panzer' (don't bother checking values, above is good enough)
+    bssl = buildStatsSummariesBinnedBySubdirs.binnedBuildStatsSummariesList_dict['panzer']
+    #print("\nbssl[0]:"); g_pp.pprint(str(bssl[0]))
+    self.assertEqual(len(bssl), 3)
+    self.assertEqual(bssl[0].fieldName, 'max_resident_size_mb')
+    self.assertEqual(bssl[0].numValues, 4)
+    self.assertEqual(bssl[1].fieldName, 'elapsed_real_time_sec')
+    self.assertEqual(bssl[1].numValues, 4)
+    self.assertEqual(bssl[2].fieldName, 'file_size_mb')
+    self.assertEqual(bssl[2].numValues, 4)
+    # Build stats for 'rol' (don't bother checking values, above is good enough)
+    bssl = buildStatsSummariesBinnedBySubdirs.binnedBuildStatsSummariesList_dict['rol']
+    #print("\nbssl[0]:"); g_pp.pprint(str(bssl[0]))
+    self.assertEqual(len(bssl), 3)
+    self.assertEqual(bssl[0].fieldName, 'max_resident_size_mb')
+    self.assertEqual(bssl[0].numValues, 4)
+    self.assertEqual(bssl[1].fieldName, 'elapsed_real_time_sec')
+    self.assertEqual(bssl[1].numValues, 4)
+    self.assertEqual(bssl[2].fieldName, 'file_size_mb')
+    self.assertEqual(bssl[2].numValues, 4)
+    # Build stats for 'thyra' (don't bother checking values, above is good enough)
+    bssl = buildStatsSummariesBinnedBySubdirs.binnedBuildStatsSummariesList_dict['thyra']
+    #print("\nbssl[0]:"); g_pp.pprint(str(bssl[0]))
+    self.assertEqual(len(bssl), 3)
+    self.assertEqual(bssl[0].fieldName, 'max_resident_size_mb')
+    self.assertEqual(bssl[0].numValues, 4)
+    self.assertEqual(bssl[1].fieldName, 'elapsed_real_time_sec')
+    self.assertEqual(bssl[1].numValues, 4)
+    self.assertEqual(bssl[2].fieldName, 'file_size_mb')
+    self.assertEqual(bssl[2].numValues, 4)
+    # Build stats for 'tpetra' (don't bother checking values, above is good enough)
+    bssl = buildStatsSummariesBinnedBySubdirs.binnedBuildStatsSummariesList_dict['tpetra']
+    #print("\nbssl[0]:"); g_pp.pprint(str(bssl[0]))
+    self.assertEqual(len(bssl), 3)
+    self.assertEqual(bssl[0].fieldName, 'max_resident_size_mb')
+    self.assertEqual(bssl[0].numValues, 4)
+    self.assertEqual(bssl[1].fieldName, 'elapsed_real_time_sec')
+    self.assertEqual(bssl[1].numValues, 4)
+    self.assertEqual(bssl[2].fieldName, 'file_size_mb')
+    self.assertEqual(bssl[2].numValues, 4)
+
+
+#############################################################################
+#
+# Test summarize_build_stats.createAsciiReportOfBuildStatsSummariesSingleSet()
+#
+#############################################################################
+
+
+class test_createAsciiReportOfBuildStatsSummariesSingleSet(unittest.TestCase):
+
+  def test_big_small(self):
+    buildStatsDOL = getBuildStatsForTests()
+    buildStatsSummariesList = SBS.computeStdBuildStatsSummariesSingleDOL(buildStatsDOL)
+    buildStatsAsciiReport = SBS.createAsciiReportOfBuildStatsSummariesSingleSet(
+      buildStatsSummariesList, "Full Project")
+    self.assertEqual(buildStatsAsciiReport,
+      "Full Project: sum(max_resident_size_mb) = 10023.45 (21 entries)\n"+\
+      "Full Project: max(max_resident_size_mb) = 2343.75 (packages/tpetra/classic/NodeAPI/CMakeFiles/tpetraclassicnodeapi.dir/Kokkos_DefaultNode.cpp.o)\n"+\
+      "Full Project: sum(elapsed_real_time_sec) = 157.9 (21 entries)\n"+\
+      "Full Project: max(elapsed_real_time_sec) = 48.2 (packages/rol/adapters/epetra/test/sol/CMakeFiles/ROL_adapters_epetra_test_sol_EpetraSROMSampleGenerator.dir/test_02.cpp.o)\n"+\
+      "Full Project: sum(file_size_mb) = 157.19 (21 entries)\n"+\
+      "Full Project: max(file_size_mb) = 42.92 (packages/panzer/adapters-stk/example/CurlLaplacianExample/CMakeFiles/PanzerAdaptersSTK_CurlLaplacianExample.dir/main.cpp.o)\n" )
+
+
+#############################################################################
+#
+# Test summarize_build_stats.createAsciiReportOfBuildStatsSummaries()
+#
+#############################################################################
+
+
+big_small_summary_full_project_ascii = \
+r"""Full Project: sum(max_resident_size_mb) = 10023.45 (21 entries)
+Full Project: max(max_resident_size_mb) = 2343.75 (packages/tpetra/classic/NodeAPI/CMakeFiles/tpetraclassicnodeapi.dir/Kokkos_DefaultNode.cpp.o)
+Full Project: sum(elapsed_real_time_sec) = 157.9 (21 entries)
+Full Project: max(elapsed_real_time_sec) = 48.2 (packages/rol/adapters/epetra/test/sol/CMakeFiles/ROL_adapters_epetra_test_sol_EpetraSROMSampleGenerator.dir/test_02.cpp.o)
+Full Project: sum(file_size_mb) = 157.19 (21 entries)
+Full Project: max(file_size_mb) = 42.92 (packages/panzer/adapters-stk/example/CurlLaplacianExample/CMakeFiles/PanzerAdaptersSTK_CurlLaplacianExample.dir/main.cpp.o)
+"""
+
+big_small_summary_ascii = \
+big_small_summary_full_project_ascii + \
+"\n" + \
+r"""adelus: sum(max_resident_size_mb) = 835.94 (4 entries)
+adelus: max(max_resident_size_mb) = 664.06 (packages/adelus/src/CMakeFiles/zadelus.dir/Adelus_pcomm.cpp.o)
+adelus: sum(elapsed_real_time_sec) = 1.4 (4 entries)
+adelus: max(elapsed_real_time_sec) = 0.5 (packages/adelus/src/CMakeFiles/zadelus.dir/Adelus_pcomm.cpp.o)
+adelus: sum(file_size_mb) = 5.73 (4 entries)
+adelus: max(file_size_mb) = 4.96 (packages/adelus/test/vector_random/Adelus_vector_random.exe)
+
+gtest: sum(max_resident_size_mb) = 234.38 (1 entries)
+gtest: max(max_resident_size_mb) = 234.38 (commonTools/gtest/CMakeFiles/gtest.dir/gtest/gtest-all.cc.o)
+gtest: sum(elapsed_real_time_sec) = 3.5 (1 entries)
+gtest: max(elapsed_real_time_sec) = 3.5 (commonTools/gtest/CMakeFiles/gtest.dir/gtest/gtest-all.cc.o)
+gtest: sum(file_size_mb) = 3.15 (1 entries)
+gtest: max(file_size_mb) = 3.15 (commonTools/gtest/CMakeFiles/gtest.dir/gtest/gtest-all.cc.o)
+
+panzer: sum(max_resident_size_mb) = 3828.13 (4 entries)
+panzer: max(max_resident_size_mb) = 1660.16 (packages/panzer/adapters-stk/example/CurlLaplacianExample/CMakeFiles/PanzerAdaptersSTK_CurlLaplacianExample.dir/main.cpp.o)
+panzer: sum(elapsed_real_time_sec) = 68.5 (4 entries)
+panzer: max(elapsed_real_time_sec) = 37.9 (packages/panzer/adapters-stk/example/CurlLaplacianExample/CMakeFiles/PanzerAdaptersSTK_CurlLaplacianExample.dir/main.cpp.o)
+panzer: sum(file_size_mb) = 91.65 (4 entries)
+panzer: max(file_size_mb) = 42.92 (packages/panzer/adapters-stk/example/CurlLaplacianExample/CMakeFiles/PanzerAdaptersSTK_CurlLaplacianExample.dir/main.cpp.o)
+
+rol: sum(max_resident_size_mb) = 1982.42 (4 entries)
+rol: max(max_resident_size_mb) = 712.89 (packages/rol/adapters/epetra/test/sol/CMakeFiles/ROL_adapters_epetra_test_sol_EpetraSROMSampleGenerator.dir/test_02.cpp.o)
+rol: sum(elapsed_real_time_sec) = 75.7 (4 entries)
+rol: max(elapsed_real_time_sec) = 48.2 (packages/rol/adapters/epetra/test/sol/CMakeFiles/ROL_adapters_epetra_test_sol_EpetraSROMSampleGenerator.dir/test_02.cpp.o)
+rol: sum(file_size_mb) = 51.88 (4 entries)
+rol: max(file_size_mb) = 18.12 (packages/rol/adapters/belos/test/vector/CMakeFiles/ROL_adapters_belos_test_vector_BelosInterface.dir/test_01.cpp.o)
+
+thyra: sum(max_resident_size_mb) = 732.42 (4 entries)
+thyra: max(max_resident_size_mb) = 195.31 (packages/thyra/adapters/epetra/example/CMakeFiles/ThyraEpetraAdapters_sillyPowerMethod_epetra.dir/sillyPowerMethod_epetra.cpp.o)
+thyra: sum(elapsed_real_time_sec) = 7.4 (4 entries)
+thyra: max(elapsed_real_time_sec) = 2.2 (packages/thyra/adapters/epetra/example/CMakeFiles/ThyraEpetraAdapters_sillyPowerMethod_epetra.dir/sillyPowerMethod_epetra.cpp.o)
+thyra: sum(file_size_mb) = 4.5 (4 entries)
+thyra: max(file_size_mb) = 1.43 (packages/thyra/adapters/epetra/example/CMakeFiles/ThyraEpetraAdapters_sillyPowerMethod_epetra.dir/sillyPowerMethod_epetra.cpp.o)
+
+tpetra: sum(max_resident_size_mb) = 2410.16 (4 entries)
+tpetra: max(max_resident_size_mb) = 2343.75 (packages/tpetra/classic/NodeAPI/CMakeFiles/tpetraclassicnodeapi.dir/Kokkos_DefaultNode.cpp.o)
+tpetra: sum(elapsed_real_time_sec) = 1.4 (4 entries)
+tpetra: max(elapsed_real_time_sec) = 1.0 (packages/tpetra/classic/NodeAPI/CMakeFiles/tpetraclassicnodeapi.dir/Kokkos_DefaultNode.cpp.o)
+tpetra: sum(file_size_mb) = 0.28 (4 entries)
+tpetra: max(file_size_mb) = 0.16 (packages/tpetra/classic/NodeAPI/CMakeFiles/tpetraclassicnodeapi.dir/Kokkos_DefaultNode.cpp.o)
+"""
+
+
+empty_summary_ascii = \
+r"""No build statistics to summarize!"""
+
+
+class test_createAsciiReportOfBuildStatsSummaries(unittest.TestCase):
+
+  def test_big_small(self):
+    buildStatsDOL = getBuildStatsForTests()
+    buildStatsBinnedBySubdirs = SBS.binBuildStatsDictOfListsBySubdirUnderDirs(
+      buildStatsDOL, [ "commonTools", "packages" ] )
+    buildStatsSummariesBinnedBySubdirs = SBS.computeStdBuildStatsSummaries(
+      buildStatsBinnedBySubdirs )
+    buildStatsAsciiReport = SBS.createAsciiReportOfBuildStatsSummaries(
+      buildStatsSummariesBinnedBySubdirs )
+    self.assertEqual(buildStatsAsciiReport,
+      big_small_summary_ascii )
+
+
+#############################################################################
+#
+# Test summarize_build_stats.py
+#
+#############################################################################
+
+class test_summarize_build_stats_py(unittest.TestCase):
+
+  def test_big_small_full_project(self):
+    cmnd = thisScriptsDir+"/../summarize_build_stats.py"+\
+      " "+g_testBaseDir+"/build_stats.big.small.csv"
+    output = GSS.getCmndOutput(cmnd)
+    self.assertEqual(GSS.s(output),
+      GSS.s(big_small_summary_full_project_ascii+"\n"))
+
+  def test_big_small_by_subdir(self):
+    cmnd = thisScriptsDir+"/../summarize_build_stats.py"+\
+      " --bin-by-subdirs-under-dirs=commonTools,packages"+\
+      " "+g_testBaseDir+"/build_stats.big.small.csv"
+    output = GSS.getCmndOutput(cmnd)
+    self.assertEqual(GSS.s(output), GSS.s(big_small_summary_ascii+"\n"))
+
+  def test_empty_build_stats(self):
+    cmnd = thisScriptsDir+"/../summarize_build_stats.py"+\
+      " --bin-by-subdirs-under-dirs=commonTools,packages"+\
+      " "+g_testBaseDir+"/build_stats.empty.csv"
+    output = GSS.getCmndOutput(cmnd)
+    self.assertEqual(GSS.s(output), GSS.s(empty_summary_ascii+"\n"))
+
+
+#
+# Run the unit tests!
+#
+
+if __name__ == '__main__':
+
+  unittest.main()
diff --git a/trilinos_source15/commonTools/build_stats/wrapper/NMParser.py b/trilinos_source15/commonTools/build_stats/wrapper/NMParser.py
new file mode 100644
index 0000000000..c178abfd3e
--- /dev/null
+++ b/trilinos_source15/commonTools/build_stats/wrapper/NMParser.py
@@ -0,0 +1,96 @@
+"""
+NMParser: parse the output of `nm` run on an object file.
+
+Note:
+Try to be Python 2 and Python 3 compliant.
+"""
+import subprocess  # spawning nm
+import re          # regex matching
+import os          # line separator
+import sys
+
+
+from Python2and3 import b, s
+
+
+class NMParser:
+  """Simple parser for `nm` output that counts symbols by type."""
+
+  # map nm symbol-type codes to the CSV column names we report
+  nm_option_csv_map = {
+    'N' : 'symbol_debug',
+    'p' : 'symbol_stack_unwind',
+    'R' : 'symbol_ro_data_global',
+    'r' : 'symbol_ro_data_local',
+    'T' : 'symbol_text_global',
+    't' : 'symbol_text_local',
+    'u' : 'symbol_unique_global',
+  }
+
+  # map nm symbol-type codes to human-readable descriptions
+  nm_option_desc_map = {
+    'N' : 'debugging symbol',
+    'p' : 'stack unwind section',
+    'R' : 'read only global data',
+    'r' : 'read only local data',
+    'T' : 'global text section',
+    't' : 'local text section',
+    'u' : 'unique global symbol',
+  }
+
+  nm_re_type_expr = ''.join(nm_option_desc_map)
+  nm_re_str = r'^[a-zA-Z0-9]+\s+(?P<size>[a-zA-Z0-9]{2,})\s+(?P<type>[' \
+              + nm_re_type_expr + r'])\s+'
+  nm_re = re.compile(nm_re_str)
+
+  @staticmethod
+  def parse_object(filename):
+    """
+    Simple NM parsing of an object file.
+    Given an object file, we call `nm -aS <file>`.
+
+    Next, we parse stdout and match symbol lines corresponding to types
+    from nm_option_desc_map.
+
+    Data are aggregated into a dict using the keys from nm_option_desc_map.
+
+    The keys are obtained from nm_option_desc_map and enforced inside the
+    regex used; see nm_re_type_expr, nm_re_str, and nm_re in the static
+    fields of this class.
+    """
+    FNULL = None
+    if sys.version_info < (3,):
+      FNULL = open(os.devnull, 'w')
+      local_devnull = FNULL
+    else:
+      local_devnull = subprocess.DEVNULL
+    p = subprocess.Popen(['nm', '-aS', filename],
+                         stdout=subprocess.PIPE,
+                         stderr=local_devnull)
+    output = p.communicate()[0]
+
+    if FNULL: FNULL.close()
+
+    nm_counts = dict()
+
+    for line in output.split(b(os.linesep)):
+      m = NMParser.nm_re.match(s(line))
+      if m:
+        nm_counts[m.group('type')] = nm_counts.get(m.group('type'), 0) + 1
+    # return what we found
+    return nm_counts
+
+  @staticmethod
+  def print_counts(nm_counts,
+                   csv_line=False,
+                   csv_header=False):
+    for k,v in nm_counts.items():
+      print("\"{key}\",{value}".format(key=NMParser.nm_option_desc_map[k],
+                                       value=v))
+
+  @staticmethod
+  def get_csv_map(nm_counts):
+    # Create a map of the form csv_header_str : value.  Loop over the
+    # csv_map, which guarantees we always return the same columns;
+    # otherwise, looping over nm_counts would return only the CSV columns
+    # found in this specific file, while the wrapper needs consistent
+    # output from all files parsed.
+    csv_map = { v : nm_counts.get(k,0) for k,v in NMParser.nm_option_csv_map.items() }
+    return csv_map
+
diff --git a/trilinos_source15/commonTools/build_stats/wrapper/Python2and3.py b/trilinos_source15/commonTools/build_stats/wrapper/Python2and3.py
new file mode 100644
index 0000000000..6d0f0c8851
--- /dev/null
+++ b/trilinos_source15/commonTools/build_stats/wrapper/Python2and3.py
@@ -0,0 +1,36 @@
+"""
+Python2and3: Simple set of utilities to write Python code that is Python 2 and
+Python 3 compatible.
+"""
+
+import sys
+
+#
+# Byte array / string / unicode support across Python 2 & 3
+#
+# Note that the str class in Python 2 is an ASCII string (byte) array and in
+# Python 3 it is a Unicode object.  For Python 3 code that is backward
+# compatible with Python 2, we sometimes need version-specific conversion
+# functions to give us the data type we desire.  These functions are:
+#
+#   b(x)   return a byte array of str x, much like b'<string>' in Python 3
+#   s(x)   return a version-specific str object equivalent to x
+#   u(x)   return a unicode object equivalent to x, much like u'<string>' in
+#          Python 2
+#
+if sys.version_info < (3,):
+  # Python 2
+  def b(x): return x
+  def s(x): return x
+  def u(x): return unicode(x)
+else:
+  # Python 3
+  import codecs
+  def b(x): return codecs.latin_1_encode(x)[0]
+  def s(x):
+    try:
+      return x.decode("utf-8")
+    except AttributeError:
+      return x
+  def u(x): return x
diff --git a/trilinos_source15/commonTools/build_stats/wrapper/WrapperCommandLineParser.py b/trilinos_source15/commonTools/build_stats/wrapper/WrapperCommandLineParser.py
new file mode 100644
index 0000000000..08551dd2f0
--- /dev/null
+++ b/trilinos_source15/commonTools/build_stats/wrapper/WrapperCommandLineParser.py
@@ -0,0 +1,240 @@
+import os
+import sys
+
+class WrapperCommandLineParser:
+  """
+  Command-line parsing: find any wrapper args and determine any output names.
+  """
+  def __init__(self, cmdline_args):
+    # base build directory for computing correct relative paths
+    self.base_build_dir = ''
+    # if we write anything out it goes here
+    self.output_stats_file = ''
+    # if op generates an output file (-o ...)
+    self.op_output_file = ''
+    # if we perform an operation this is it
+    self.op = ''
+    self.short_op = ''
+    # whether to gather and print a csv_banner
+    self.print_csv_banner = False
+    # whatever the op's args should be
+    self.op_args = []
+    # a list of lists of commands to evaluate
+    self.commands = []
+    # whether we have the output arg
+    self.have_output_arg = False
+    # ENV control variables
+    self.parse_nm = True
+    self.output_fields = None
+
+    self.time_cmd = 'not_set'
+    # we must parse envs first, because they contain required parameters
+    self.parse_env_controls()
+    # finally, parse the args
+    self.parse_cmdline_args(cmdline_args)
+
+  def parse_env_controls(self):
+    """Parse control variables from the ENV (rather than the command line)
+
+    # REQUIRED
+    TRILINOS_BUILD_STATS_TIME_CMD : points to a valid GNU Time executable
+
+    # REQUIRED
+    TRILINOS_BUILD_STATS_INNER_OP : the command we are wrapping
+
+    # REQUIRED
+    TRILINOS_BUILD_STATS_BASE_DIR : we need to know the `root` of the build
+    tree so we annotate paths correctly (see github PR 8638 for an issue
+    with Makefile builds)
+
+    # OPTIONAL
+    TRILINOS_BUILD_STATS_OUTPUT_FIELDS : control what gets written to the
+    timing files.  Can enable only some fields, e.g.,
+    FileName,FileSize,op
+    """
+    # optional, control which fields we write to a file
+    # This does not promise we will not parse all possible fields
+    # (That is to say, this does not promise any performance benefits)
+    self.output_fields = os.environ.get('TRILINOS_BUILD_STATS_OUTPUT_FIELDS')
+
+    err_msg=''
+    # required : TRILINOS_BUILD_STATS_TIME_CMD
+    #            TRILINOS_BUILD_STATS_INNER_OP
+    #            TRILINOS_BUILD_STATS_BASE_DIR
+    if 'TRILINOS_BUILD_STATS_TIME_CMD' not in os.environ:
+      err_msg+=os.linesep
+      err_msg+=('TRILINOS_BUILD_STATS_TIME_CMD (ENV) is required. CMake should '
+                '`find` and set this. If using the build tools manually, locate '
+                'GNU Time (typically /usr/bin/time), verify it supports `--format` '
+                'and `--output`, and then set TRILINOS_BUILD_STATS_TIME_CMD=/path/to/time')
+
+    if 'TRILINOS_BUILD_STATS_INNER_OP' not in os.environ:
+      err_msg+=os.linesep
+      err_msg+=('TRILINOS_BUILD_STATS_INNER_OP (ENV) is required. CMake should '
+                'set this to a specific operation, e.g., ${CMAKE_C_COMPILER}. '
+                'If you are using the tools independently, please see the docs '
+                'for examples of how to write the wrapper scripts. E.g., '
+                'export TRILINOS_BUILD_STATS_INNER_OP=mpicc')
+
+    if 'TRILINOS_BUILD_STATS_BASE_DIR' not in os.environ:
+      err_msg+=os.linesep
+      err_msg+=('TRILINOS_BUILD_STATS_BASE_DIR (ENV) is required. CMake should '
+                'set this to the build directory (top level). If using this script '
+                'manually, set this to your build directory (full path). E.g., '
+                'export TRILINOS_BUILD_STATS_BASE_DIR=/path/to/build')
+
+    if err_msg:
+      sys.stderr.write(err_msg)
+      sys.exit(1)
+
+    # set the required parameters - os.environ would raise a KeyError if any
+    # were undefined, but we have already exited above on any errors
+    self.time_cmd = os.environ['TRILINOS_BUILD_STATS_TIME_CMD']
+    self.op = os.environ['TRILINOS_BUILD_STATS_INNER_OP']
+    self.base_build_dir = os.environ['TRILINOS_BUILD_STATS_BASE_DIR']
+
+    # we name the output as: <output>.<op>.timing, so this will result in
+    # names like blah.o.mpicc.timing, blah.a.ar.timing, blah.ld.timing, ...
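+    # As an illustration (hypothetical values, not set by the code above):
+    # with TRILINOS_BUILD_STATS_INNER_OP=/usr/bin/mpicc we get
+    #   self.short_op          -> 'mpicc'
+    #   self.output_stats_file -> 'mpicc.timing'
+    # and once parse_cmdline_arg_helper() sees '-o hello.o' this becomes
+    #   self.output_stats_file -> 'hello.o.mpicc.timing'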
+    self.short_op = os.path.basename(self.op)
+    self.output_stats_file = self.short_op + '.timing'
+
+    parse_nm = os.environ.get('TRILINOS_BUILD_STATS_PARSE_NM', "True")
+    if parse_nm.lower() == 'true':
+      self.parse_nm = True
+    elif parse_nm.lower() == 'false':
+      self.parse_nm = False
+    else:
+      msg='ERROR: TRILINOS_BUILD_STATS_PARSE_NM is set to [{}]'.format(parse_nm)
+      msg+=', but valid values are True or False. Defaulting to True{}'.format(os.linesep)
+      sys.stderr.write(msg)
+      self.parse_nm = True
+
+  def __repr__(self):
+    return self.lcl_print()
+
+  def __str__(self):
+    return self.lcl_print()
+
+  def generate_stats(self):
+    return self.have_output_arg
+
+  def lcl_print(self):
+    fmt_string = [
+      'output_stats_file : {output_stats_file}',
+      'op : {op}',
+      'op_output_file : {op_output_file}',
+      'print_csv_banner : {print_csv_banner}',
+    ]
+    return '\n'.join(fmt_string).format(
+      output_stats_file=self.output_stats_file,
+      op_output_file=self.op_output_file,
+      op=self.op,
+      print_csv_banner=self.print_csv_banner)
+
+  def get_output_fields(self, csv_map):
+    if self.output_fields:
+      # this assumes it is a string of comma-separated labels
+      fields = self.output_fields.split(',')
+    else:
+      # apply sort here, so the output will be deterministic
+      fields = sorted([ k for k in csv_map ])
+
+    return fields
+
+  def generate_commandlets(self, cmdline_args):
+
+    # we need to handle compound commands, e.g., && (and maybe ||)
+    cmdlet = []
+    for arg in cmdline_args:
+      if arg.strip() == "&&":
+        # add the command
+        self.commands.append(cmdlet)
+        # start a new command
+        cmdlet = []
+      elif arg.strip() != '':
+        cmdlet.append(arg)
+
+    if cmdlet:
+      self.commands.append(cmdlet)
+    # post: all commands are broken up into lists of lists (of options)
+    return
+
+  def parse_cmdline_arg_helper(self, cmdline_args):
+
+    self.have_output_arg = False
+    # we want to do something different for ar, ranlib, or ld.*
+    # these commands do not necessarily have a good 'output' arg denoted by -o
+    # first try to find -o; if that passes then use it.
+    # if not, then do something special based on ar/ranlib/ld.*
+
+    # find the output arg (index() will raise an exception if not found)
+    # we use -o blah.o or -o /path/to/blah.o or none at all
+    try:
+      output_idx = cmdline_args.index('-o')
+      self.op_output_file = cmdline_args[output_idx+1]
+      self.output_stats_file = self.op_output_file + '.' + self.output_stats_file
+
+      self.have_output_arg = True
+      return
+
+    except (ValueError, IndexError):
+      pass
+
+    # we failed to find -o, so try op-specific handling
+    if self.short_op.endswith('ar') or self.short_op.endswith('ranlib'):
+      for arg in cmdline_args:
+        if arg.endswith('.a'):
+          self.op_output_file = arg
+          self.output_stats_file = arg + '.' + self.output_stats_file
+          self.have_output_arg = True
+          return
+      # we hit this if we can't find a .a
+      return
+
+  def parse_cmdline_args(self, cmdline_args):
+    wrapper_header_arg = '----get_header'
+    # require that any wrapper arg be the first argument
+    try:
+      wrapper_arg_idx = 1
+      wrapper_arg = cmdline_args[wrapper_arg_idx]
+      if wrapper_arg == wrapper_header_arg:
+        self.print_csv_banner=True
+        # this isn't implemented....
+        sys.stderr.write('----get_header was requested, but is not implemented'
+                         '. Doing nothing.')
+        sys.exit(0)
+
+      self.parse_cmdline_arg_helper(cmdline_args)
+
+      # Remove the script name
+      self.op_args = cmdline_args[1:]
+      # we could clean this whole thing up some..
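+      # A minimal sketch of what generate_commandlets() does below with a
+      # hypothetical compound command ('&&' separates the commandlets):
+      #
+      #   ['mpicc', '-c', 'a.c', '&&', 'mpicc', '-c', 'b.c']
+      #     => self.commands == [['mpicc', '-c', 'a.c'], ['mpicc', '-c', 'b.c']]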
+      self.generate_commandlets([self.op] + self.op_args)
+
+    except Exception as e:
+      print("Got an error parsing the command line in the compiler wrapper python script")
+      print(e)
+      # any error and we give up: print the usage help, then re-raise
+      help_msg = ["Compiler wrapper:",
+                  "  Usage: wrapper [----base-build-dir=<dir>] ----op=<compiler> [args] | ----get_header",
+                  "",
+                  "  ----base-build-dir=/path/to/base/build/dir",
+                  "    Absolute path to the base project build directory",
+                  "  ----op=/path/to/compiler",
+                  "    Absolute path to the compiler we are wrapping",
+                  "  ----get_header",
+                  "    May not be combined with ----op or ----base-build-dir, prints the csv_header generated",
+                  "",
+                  "  Tool depends on finding a -o option in args;",
+                  "  statistics will be written to <output_file>.<op>.timing",
+                 ]
+      print('\n'.join(help_msg))
+      raise
+
diff --git a/trilinos_source15/commonTools/build_stats/wrapper/WrapperOpTimer.py b/trilinos_source15/commonTools/build_stats/wrapper/WrapperOpTimer.py
new file mode 100644
index 0000000000..a1f5502ffd
--- /dev/null
+++ b/trilinos_source15/commonTools/build_stats/wrapper/WrapperOpTimer.py
@@ -0,0 +1,227 @@
+import subprocess
+import csv
+import os
+from WrapperCommandLineParser import WrapperCommandLineParser
+
+def get_full_header(fields_list, full_header_map):
+  return ','.join([ full_header_map[f] for f in fields_list ])
+
+
+# map GNU time format specifiers to the CSV column names we report
+usr_bin_time_csv_map = {
+  "E": "elapsed_real_time_fmt",
+  "e": "elapsed_real_time_sec",
+  "S": "cpu_sec_kernel_mode",
+  "U": "cpu_sec_user_mode",
+  "P": "perc_cpu_used",
+  "M": "max_resident_size_Kb",
+  "t": "avg_resident_size_Kb",
+  "K": "avg_total_memory_used_Kb",
+  "D": "avg_size_unshared_data_area_Kb",
+  "p": "avg_size_unshared_stack_area_Kb",
+  "X": "avg_size_unshared_text_area_Kb",
+  "Z": "page_size_bytes",
+  "F": "num_major_page_faults",
+  "R": "num_minor_page_faults",
+  "W": "num_swapped",
+  "c": "num_involuntary_context_switch",
+  "w": "num_waits",
+  "I": "num_filesystem_inputs",
+  "O": "num_filesystem_outputs",
+  "r": "num_socket_msg_recv",
+  "s": "num_socket_msg_sent",
+  "k": "num_signals",
+  "x": "exit_status",
+}
+
+# map GNU time format specifiers to their documented meanings
+usr_bin_time_desc_map = {
+  "E": "Elapsed real time ([h:]m:s)",
+  "e": "Elapsed real time (s)",
+  "S": "Total number of CPU-seconds that the process spent in kernel mode",
+  "U": "Total number of CPU-seconds that the process spent in user mode",
+  "P": "Percentage of the CPU that this job got",
+  "M": "Maximum resident set size of the process during its lifetime (Kb)",
+  "t": "(Not in tcsh.) Average resident set size of the process (Kb)",
+  "K": "Average total (data+stack+text) memory use of the process (Kb)",
+  "D": "Average size of unshared data area (Kb)",
+  "p": "Average size of unshared stack space (Kb)",
+  "X": "Average size of shared text space (Kb)",
+  "Z": "System page size (bytes)",
+  "F": "Number of major page faults",
+  "R": "Number of minor or recoverable page faults",
+  "W": "Number of times the process was swapped out of main memory",
+  "c": "Number of times the process was context-switched involuntarily",
+  "w": "Number of waits",
+  "I": "Number of file system inputs by the process",
+  "O": "Number of file system outputs by the process",
+  "r": "Number of socket messages received by the process",
+  "s": "Number of socket messages sent by the process",
+  "k": "Number of signals delivered to the process",
+  "x": "(Not in tcsh.) Exit status of the command",
+}
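+# A hedged illustration of the format argument composed below from these maps
+# (hypothetical two-field selection; the real default_fields list is longer):
+#
+#   get_full_header(['e', 'M'], usr_bin_time_csv_map)
+#     -> 'elapsed_real_time_sec,max_resident_size_Kb'
+#
+# so field_arg would be '--format=elapsed_real_time_sec,max_resident_size_Kb\n%e,%M'
+# and GNU time writes a CSV header line followed by one data row.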
+
+default_fields = [
+  "e",
+  "M",
+  "K",
+  "D",
+  "X",
+  "F",
+  "R",
+  "W",
+  "w",
+  "c",
+  "S",
+  "U",
+  "P",
+  "I",
+  "O",
+  "r",
+  "s",
+  "k",
+  "x",
+]
+
+field_header_full = get_full_header(default_fields, usr_bin_time_csv_map)
+field_header_short = ','.join(default_fields)
+field_arg = '--format=' + field_header_full + '\n' + ','.join([ '%{}'.format(f) for f in default_fields ])
+
+class WrapperOpTimer:
+
+  @staticmethod
+  def run_cmd(cmd):
+    p = subprocess.Popen(cmd)
+    p.communicate()
+    returncode = p.returncode
+    return returncode
+
+  @staticmethod
+  def time_op(wcp):
+    """
+    Evaluate 'op' with 'op_args', and gather stats into output_stats_file.
+    """
+    # if os.path.exists(output_stats_file) and os.path.getsize(output_stats_file) > 0:
+    #   print("WARNING: File '"+output_stats_file+"' exists and will be overwritten")
+    # print("op='"+op+"'")
+    # print("op_args='"+str(op_args)+"'")
+    # print("op_output_file='"+op_output_file+"'")
+
+    # initialize the field names and the csv row
+    fields = []
+    csv_row = {}
+
+    cmdcount = 0
+    returncode = 0
+    for cmd in wcp.commands:
+      if cmdcount == 0:
+        cmd = [ wcp.time_cmd,
+                # '--append',
+                '--output=' + wcp.output_stats_file,
+                field_arg,
+              ] + cmd
+      cmdcount += 1
+      returncode |= WrapperOpTimer.run_cmd(cmd)
+
+    # read the csv file written by GNU time
+    with open(wcp.output_stats_file, 'r') as csvfile:
+      # create a csv reader object
+      csvreader = csv.reader(csvfile)
+
+      # extract the field names from the first row
+      fields = next(csvreader)
+
+      # Extract each data row one by one; we effectively retain only the
+      # last row.  It isn't clear if we should expect multiple rows per
+      # file.  In the bash version of this I was able to handle multiple
+      # rows per file.  We could do that here, but it would require
+      # returning a list of csv maps.  On the system side of things, it is
+      # very murky: we would need to ensure file integrity (concurrent
+      # reads/writes).  For now, it's best to enforce one file per
+      # operation performed (which should happen if we name things
+      # correctly).  That invariant is violated if there is a cycle in the
+      # build graph, but that is a larger problem.
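+      # Hedged illustration of a timing file as just written by GNU time
+      # (hypothetical values):
+      #
+      #   elapsed_real_time_sec,max_resident_size_Kb,...,exit_status
+      #   1.95,240000,...,0
+      #
+      # 'fields' above holds the header row; the loop below keeps the last
+      # (normally the only) data row as csv_row.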
+      for row in csvreader:
+        csv_row = dict(zip(fields, row))
+
+    # FileSize
+    csv_row['FileSize'] = WrapperOpTimer.get_file_size(wcp.op_output_file)
+
+    # add a field with the short op
+    csv_row['op'] = os.path.basename(wcp.op)
+
+    # FileName
+    if wcp.base_build_dir:
+      abs_base_build_dir = os.path.abspath(wcp.base_build_dir)
+      current_working_dir = os.path.abspath(os.getcwd())
+      rel_path_to_base_build_dir = os.path.relpath(
+        current_working_dir, start=abs_base_build_dir)
+      rel_op_output_file = os.path.join(rel_path_to_base_build_dir, wcp.op_output_file)
+    else:
+      rel_op_output_file = wcp.op_output_file
+    csv_row['FileName'] = rel_op_output_file
+
+    # Remove the build stats output file if the build failed
+    if returncode != 0 and os.path.exists(wcp.output_stats_file):
+      os.remove(wcp.output_stats_file)
+
+    return (csv_row, returncode)
+
+
+  # returns the file size in bytes (or -1 if the file cannot be stat'ed)
+  @staticmethod
+  def get_file_size(filename):
+    sz = -1
+    try:
+      sz = os.stat(filename).st_size
+    except OSError:
+      pass
+    return sz
+
+
diff --git a/trilinos_source15/commonTools/build_stats/wrapper/magic_wrapper.py b/trilinos_source15/commonTools/build_stats/wrapper/magic_wrapper.py
new file mode 100755
index 0000000000..803ec3208b
--- /dev/null
+++ b/trilinos_source15/commonTools/build_stats/wrapper/magic_wrapper.py
@@ -0,0 +1,88 @@
+#!/usr/bin/env python3
+'''
+magic_wrapper: wrap a build operation, time it, and record build stats.
+
+Note:
+Try to be Python 2 and Python 3 compliant.
+
+Ideally, keep this file minimal to reduce parsing overhead.
+Most 'work' is done in the classes imported.
+'''
+
+# we need to run subcommands
+import os
+import csv
+import sys
+from NMParser import NMParser
+from WrapperCommandLineParser import WrapperCommandLineParser
+from WrapperOpTimer import WrapperOpTimer
+
+# given a dict of key/val pairs, write them as a CSV line
+def write_csv_map(filename, csv_map, csv_fields):
+  try:
+    with open(filename, 'w') as csvfile:
+      writer = csv.DictWriter(csvfile,
+                              fieldnames=csv_fields,
+                              # ignore fields in the csv_map that aren't
+                              # in fieldnames
+                              extrasaction='ignore')
+      writer.writeheader()
+      writer.writerow(csv_map)
+  except IOError:
+    print("I/O error writing: " + filename)
+
+def main(cmdline_args):
+  # parse the command line args, find wrapper args, and organize the info
+  # into fields in this class
+  wcp = WrapperCommandLineParser(cmdline_args)
+  # you can 'print()' a WrapperCommandLineParser
+  # we could add a verbose mode (----verbose)
+  #print(wcp)
+
+  # keep a dict of field : value
+  # first do the operation
+  # this must be first, as it generates the output file
+  #
+  # WARNING: Be very careful with stdout before these commands.  If the
+  # wrapped command has shell redirection it can slurp up Python's output...
+  # best to require that all messages go out after the compiler command has
+  # completed.
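+  # For example (hypothetical make rule): a link line such as
+  #   $(WRAPPER) $(CXX) -o app main.o > link.log
+  # would redirect anything this script printed before the wrapped command
+  # into link.log as well, so we defer all of our own output until the
+  # wrapped command has run.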
+  if wcp.generate_stats():
+    (csv_map, returncode) = WrapperOpTimer.time_op(wcp)
+    #print("======> Gathering stats...", file=sys.stdout)
+  else:
+    # only run the command and return the return code
+    returncode = 0
+    for cmd in wcp.commands:
+      returncode |= WrapperOpTimer.run_cmd(cmd)
+    #print("##======> NO stats {}".format(wcp.op_output_file), file=sys.stdout)
+    return returncode
+
+  if returncode == 0:
+    if wcp.parse_nm:
+      # run nm on the output file
+      # we probably need some logic to handle the case where the .o isn't
+      # created; as-is, the following will return empty dicts (we
+      # parse/validate the output from NM), so we won't return garbage
+      nmp = NMParser.parse_object(wcp.op_output_file)
+      # NMParser.print_counts(nmp)
+      # add NM's output to our csv data
+      # we could move this into the NM parser
+      csv_map.update(NMParser.get_csv_map(nmp))
+
+    # ultimately, print the csv data to a file
+    # make sure to quote csv columns
+    write_csv_map(wcp.output_stats_file,
+                  csv_map,
+                  csv_fields=wcp.get_output_fields(csv_map))
+
+  # NOTE: Above, we don't write the *.timing file if the build failed because
+  # the output target file may not exist!  And we don't want a CSV file entry
+  # that does not have all of the fields.  ToDo: It might be nice to have an
+  # entry for files that don't build and just put in empty values for NM data
+  # so we can display that with the other stats.
+
+  return returncode
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv))
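+
+# A minimal usage sketch (hypothetical paths; the CMake build-stats machinery
+# normally sets these ENV vars and generates the wrapper invocations):
+#
+#   export TRILINOS_BUILD_STATS_TIME_CMD=/usr/bin/time
+#   export TRILINOS_BUILD_STATS_INNER_OP=/usr/bin/mpicc
+#   export TRILINOS_BUILD_STATS_BASE_DIR=/path/to/build
+#   magic_wrapper.py -c hello.c -o hello.o
+#
+# which runs the compile as usual and writes the gathered stats to
+# hello.o.mpicc.timing in CSV form.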