diff --git a/docs/src/miller-as-library/main3.go b/docs/src/miller-as-library/main3.go
index 23a400453d..617488c330 100644
--- a/docs/src/miller-as-library/main3.go
+++ b/docs/src/miller-as-library/main3.go
@@ -89,10 +89,10 @@ func convert_csv_to_json(fileNames []string) error {
 		case ierr := <-inputErrorChannel:
 			retval = ierr
 			break
-		case _ = <-dataProcessingErrorChannel:
+		case <-dataProcessingErrorChannel:
 			retval = errors.New("exiting due to data error") // details already printed
 			break
-		case _ = <-doneWritingChannel:
+		case <-doneWritingChannel:
 			done = true
 			break
 		}
diff --git a/pkg/bifs/arithmetic.go b/pkg/bifs/arithmetic.go
index 1ce24544a1..bffcc5309e 100644
--- a/pkg/bifs/arithmetic.go
+++ b/pkg/bifs/arithmetic.go
@@ -946,7 +946,7 @@ func BIF_minlen_variadic(mlrvals []*mlrval.Mlrval) *mlrval.Mlrval {
 	}
 	// Do the bulk arithmetic on native ints not Mlrvals, to avoid unnecessary allocation.
 	retval := lib.UTF8Strlen(mlrvals[0].OriginalString())
-	for i, _ := range mlrvals {
+	for i := range mlrvals {
 		clen := lib.UTF8Strlen(mlrvals[i].OriginalString())
 		if clen < retval {
 			retval = clen
@@ -1116,7 +1116,7 @@ func BIF_maxlen_variadic(mlrvals []*mlrval.Mlrval) *mlrval.Mlrval {
 	}
 	// Do the bulk arithmetic on native ints not Mlrvals, to avoid unnecessary allocation.
 	retval := lib.UTF8Strlen(mlrvals[0].OriginalString())
-	for i, _ := range mlrvals {
+	for i := range mlrvals {
 		clen := lib.UTF8Strlen(mlrvals[i].OriginalString())
 		if clen > retval {
 			retval = clen
diff --git a/pkg/bifs/regex.go b/pkg/bifs/regex.go
index 011c21ac29..2095003a6c 100644
--- a/pkg/bifs/regex.go
+++ b/pkg/bifs/regex.go
@@ -147,7 +147,7 @@ func BIF_strmatchx(input1, input2 *mlrval.Mlrval) *mlrval.Mlrval {

 	captures_array := make([]*mlrval.Mlrval, len(captures))
 	if len(captures) > 0 {
-		for i, _ := range captures {
+		for i := range captures {
 			if i == 0 {
 				results.PutReference("full_capture", mlrval.FromString(captures[i]))
 			} else {
@@ -156,7 +156,7 @@ func BIF_strmatchx(input1, input2 *mlrval.Mlrval) *mlrval.Mlrval {
 	}

 	starts_array := make([]*mlrval.Mlrval, len(starts))
-	for i, _ := range starts {
+	for i := range starts {
 		if i == 0 {
 			results.PutReference("full_start", mlrval.FromInt(int64(starts[i])))
 		} else {
@@ -165,7 +165,7 @@ func BIF_strmatchx(input1, input2 *mlrval.Mlrval) *mlrval.Mlrval {
 	}

 	ends_array := make([]*mlrval.Mlrval, len(ends))
-	for i, _ := range ends {
+	for i := range ends {
 		if i == 0 {
 			results.PutReference("full_end", mlrval.FromInt(int64(ends[i])))
 		} else {
diff --git a/pkg/bifs/stats.go b/pkg/bifs/stats.go
index d7bd3f1068..bc9bc6e43c 100644
--- a/pkg/bifs/stats.go
+++ b/pkg/bifs/stats.go
@@ -636,7 +636,7 @@ func bif_percentiles_impl(

 	outputs := make([]*mlrval.Mlrval, len(ps))

-	for i, _ := range ps {
+	for i := range ps {
 		p, ok := ps[i].GetNumericToFloatValue()
 		if !ok {
 			outputs[i] = type_error_named_argument(funcname, "numeric", "percentile", ps[i])
@@ -655,7 +655,7 @@ func bif_percentiles_impl(
 		return mlrval.FromArray(outputs)
 	} else {
 		m := mlrval.NewMlrmap()
-		for i, _ := range ps {
+		for i := range ps {
 			sp := ps[i].String()
 			m.PutCopy(sp, outputs[i])
 		}
diff --git a/pkg/input/line_reader.go b/pkg/input/line_reader.go
index 663178c5df..b1f965307b 100644
--- a/pkg/input/line_reader.go
+++ b/pkg/input/line_reader.go
@@ -201,7 +201,7 @@ func channelizedLineReader(
 		// quickly, as it should.
 		if i%recordsPerBatch == 0 {
 			select {
-			case _ = <-downstreamDoneChannel:
+			case <-downstreamDoneChannel:
 				done = true
 				break
 			default:
diff --git a/pkg/input/pseudo_reader_gen.go b/pkg/input/pseudo_reader_gen.go
index fa949e01ae..495a6dab7d 100644
--- a/pkg/input/pseudo_reader_gen.go
+++ b/pkg/input/pseudo_reader_gen.go
@@ -96,7 +96,7 @@ func (reader *PseudoReaderGen) process(
 		// avoid goroutine-scheduler thrash.
 		eof := false
 		select {
-		case _ = <-downstreamDoneChannel:
+		case <-downstreamDoneChannel:
 			eof = true
 			break
 		default:
diff --git a/pkg/input/record_reader_csv.go b/pkg/input/record_reader_csv.go
index 20c1fd15d1..6ed07250d8 100644
--- a/pkg/input/record_reader_csv.go
+++ b/pkg/input/record_reader_csv.go
@@ -158,7 +158,7 @@ func channelizedCSVRecordScanner(
 		// quickly, as it should.
 		if i%recordsPerBatch == 0 {
 			select {
-			case _ = <-downstreamDoneChannel:
+			case <-downstreamDoneChannel:
 				done = true
 				break
 			default:
diff --git a/pkg/input/record_reader_json.go b/pkg/input/record_reader_json.go
index 096e506fb5..52844025e7 100644
--- a/pkg/input/record_reader_json.go
+++ b/pkg/input/record_reader_json.go
@@ -99,7 +99,7 @@ func (reader *RecordReaderJSON) processHandle(
 		i++
 		if i%recordsPerBatch == 0 {
 			select {
-			case _ = <-downstreamDoneChannel:
+			case <-downstreamDoneChannel:
 				eof = true
 				break
 			default:
diff --git a/pkg/input/record_reader_pprint.go b/pkg/input/record_reader_pprint.go
index b83710f7bc..aad87769c2 100644
--- a/pkg/input/record_reader_pprint.go
+++ b/pkg/input/record_reader_pprint.go
@@ -227,7 +227,7 @@ func getRecordBatchExplicitPprintHeader(
 			continue
 		}
 		fields := make([]string, npad-2)
-		for i, _ := range paddedFields {
+		for i := range paddedFields {
 			if i == 0 || i == npad-1 {
 				continue
 			}
@@ -361,7 +361,7 @@ func getRecordBatchImplicitPprintHeader(
 		paddedFields := reader.fieldSplitter.Split(line)
 		npad := len(paddedFields)
 		fields := make([]string, npad-2)
-		for i, _ := range paddedFields {
+		for i := range paddedFields {
 			if i == 0 || i == npad-1 {
 				continue
 			}
diff --git a/pkg/input/record_reader_xtab.go b/pkg/input/record_reader_xtab.go
index b108f771db..5d15300074 100644
--- a/pkg/input/record_reader_xtab.go
+++ b/pkg/input/record_reader_xtab.go
@@ -202,7 +202,7 @@ func channelizedStanzaScanner(
 		// quickly, as it should.
 		if numStanzasSeen%recordsPerBatch == 0 {
 			select {
-			case _ = <-downstreamDoneChannel:
+			case <-downstreamDoneChannel:
 				done = true
 				break
 			default:
diff --git a/pkg/mlrval/mlrval_output.go b/pkg/mlrval/mlrval_output.go
index a727609986..b6fd84e0fe 100644
--- a/pkg/mlrval/mlrval_output.go
+++ b/pkg/mlrval/mlrval_output.go
@@ -120,7 +120,7 @@ func (mv *Mlrval) StringifyValuesRecursively() {
 	switch mv.mvtype {

 	case MT_ARRAY:
-		for i, _ := range mv.intf.([]*Mlrval) {
+		for i := range mv.intf.([]*Mlrval) {
 			mv.intf.([]*Mlrval)[i].StringifyValuesRecursively()
 		}

diff --git a/pkg/platform/getargs_windows.go b/pkg/platform/getargs_windows.go
index 4349e43462..7a2f1af3d5 100644
--- a/pkg/platform/getargs_windows.go
+++ b/pkg/platform/getargs_windows.go
@@ -79,7 +79,7 @@ func GetArgs() []string {
 	//printArgs(retargs, "NEW")

 	globbed := make([]string, 0)
-	for i, _ := range retargs {
+	for i := range retargs {
 		// Expand things like *.csv
 		matches, err := filepath.Glob(retargs[i])
 		if matches != nil && err == nil {
diff --git a/pkg/terminals/repl/session.go b/pkg/terminals/repl/session.go
index 27bd3896a6..2f25beb319 100644
--- a/pkg/terminals/repl/session.go
+++ b/pkg/terminals/repl/session.go
@@ -176,7 +176,7 @@ func (repl *Repl) handleSession(istream *os.File) error {
 		doneDraining := false
 		for {
 			select {
-			case _ = <-repl.appSignalNotificationChannel:
+			case <-repl.appSignalNotificationChannel:
 				line = "" // Ignore any partially-entered line -- a ^C should do that
 			default:
 				doneDraining = true
diff --git a/pkg/transformers/tee.go b/pkg/transformers/tee.go
index bff0065ea7..e5f5413cad 100644
--- a/pkg/transformers/tee.go
+++ b/pkg/transformers/tee.go
@@ -183,7 +183,7 @@ func (tr *TransformerTee) Transform(
 	// But 'mlr cut -f foo then tee bar.txt then head -n 10' -- one does expect
 	// bar.txt to have all the output from cut.
 	select {
-	case _ = <-inputDownstreamDoneChannel:
+	case <-inputDownstreamDoneChannel:
 		// Do not write this to the coutputDownstreamDoneChannel, as other transformers do
 		break
 	default:
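
Note for reviewers (not part of the patch): every hunk above applies the same two Go simplifications, dropping the redundant blank identifier from range loops that use only the index, and from select cases that only wait on a channel receive. A minimal standalone sketch of both forms, using hypothetical names (items, done) rather than anything from the Miller sources:

package main

import "fmt"

func main() {
	items := []string{"a", "b", "c"}

	// Instead of `for i, _ := range items`: the blank identifier is
	// redundant when only the index is needed.
	for i := range items {
		fmt.Println(i, items[i])
	}

	done := make(chan struct{})
	close(done)

	// Instead of `case _ = <-done:`: a receive whose value is discarded
	// needs no assignment at all.
	select {
	case <-done:
		fmt.Println("done channel closed")
	default:
		fmt.Println("no signal yet")
	}
}

Both rewrites are behavior-preserving; the range form is one of the simplifications that gofmt -s applies automatically.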