Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Updated trigger-filter with functionality to use beam current from RAW::epics or a detector raw multiplicity to select events with beam when scalers are unavailable #86

Merged
merged 11 commits into from
Sep 21, 2023
Merged
Original file line number Diff line number Diff line change
@@ -0,0 +1,103 @@
package org.jlab.analysis.eventmerger;
import java.util.LinkedHashMap;
import java.util.Map;
import org.jlab.jnp.hipo4.data.*;
import org.jlab.jnp.hipo4.io.HipoReader;
import org.jlab.jnp.utils.data.TextHistogram;

/**
 * Hipo Reduce Worker: filter events based on bank size.
 *
 * Inputs: bank name and minimum number of rows.
 * Returns "true" if the selected bank size is greater than the given value
 * or the bank name is an empty string (filtering disabled).
 * Returns "false" otherwise.
 *
 * @author devita
 */
public class FilterBank implements Worker {

    private Bank filterBank = null;          // bank used for the size cut (null disables filtering)
    private String bankName = null;          // name of the bank the cut is applied to
    private int nRows = -1;                  // minimum number of rows (exclusive threshold)
    private int[] rowBuffer = new int[21];   // size histogram: 20 regular bins plus 1 overflow slot
    private int rowMax = 500;                // upper edge of the histogram range

    /**
     * Create a bank-size filter.
     *
     * @param bankName name of the bank to check; an empty string disables filtering
     * @param nRows    minimum number of rows required to keep an event (exclusive)
     */
    public FilterBank(String bankName, int nRows) {
        this.bankName = bankName;
        this.nRows = nRows;
        System.out.println("\nInitializing bank size reduction: bank set to " + this.bankName + " with minimum rows set to " + this.nRows + "\n");
    }

    /**
     * Initialize the bank schema from the reader.
     *
     * @param reader hipo reader providing the schema factory
     */
    @Override
    public void init(HipoReader reader) {
        if (!bankName.isEmpty()) {
            filterBank = new Bank(reader.getSchemaFactory().getSchema(bankName));
        }
    }

    /**
     * Event filter: select events according to the size of the chosen bank
     * (the previous javadoc wrongly said "trigger bit"). Also accumulates
     * the bank-size statistics printed by showStats().
     *
     * @param event event to examine
     * @return true if filtering is disabled or the bank has more than nRows rows
     */
    @Override
    public boolean processEvent(Event event) {

        // no bank selected: keep every event
        if (filterBank == null) return true;

        event.read(filterBank);
        double value = (double) filterBank.getRows();

        // fill the statistics array; the last element collects the overflow
        int rowBins = rowBuffer.length - 1;
        if (value > rowMax) {
            rowBuffer[rowBins]++;
        }
        else {
            // value == rowMax maps to index rowBins, i.e. the overflow slot
            int bin = (int) (rowBins * value / rowMax);
            rowBuffer[bin]++;
        }

        return filterBank.getRows() > this.nRows;
    }

    // This function has to be implemented, but not used if
    // HipoStream is not trying to classify the events.
    // (Name and signature, including the spelling, are fixed by the Worker interface.)
    @Override
    public long clasifyEvent(Event event) { return 0L; }

    /**
     * Get the map of bank-size histogram entries, keyed by bin range label,
     * with an extra "overflow" entry for sizes above rowMax.
     *
     * @return insertion-ordered map from bin label to event count
     */
    public Map<String, Double> getBankSizeMap() {
        Map<String, Double> sizeMap = new LinkedHashMap<>();
        int rowBins = rowBuffer.length - 1;
        double step = ((double) rowMax) / rowBins;
        for (int i = 0; i < rowBins; i++) {
            String key = String.format("[%6.1f -%6.1f]", (i * step), (i + 1) * step);
            sizeMap.put(key, (double) rowBuffer[i]);
        }
        sizeMap.put("overflow", (double) rowBuffer[rowBins]);
        return sizeMap;
    }

    /**
     * Print the bank-size histogram accumulated in processEvent().
     * Does nothing when filtering is disabled (no bank selected).
     */
    public void showStats() {
        if (filterBank == null) return;
        System.out.println("\n\n");
        System.out.println(bankName.toUpperCase() + " BANK SIZE HISTOGRAM (ENTRIES ARE EVENTS)\n");
        TextHistogram histo = new TextHistogram();
        Map<String, Double> sizeMap = this.getBankSizeMap();
        histo.setPrecision(0);
        histo.setMinDecriptorWidth(28);
        histo.setWidth(80);
        histo.setData(sizeMap);
        histo.print();
    }
}
Original file line number Diff line number Diff line change
@@ -1,6 +1,8 @@
package org.jlab.analysis.eventmerger;
import java.util.LinkedHashMap;
import java.util.Map;
import org.jlab.detector.epics.Epics;
import org.jlab.detector.epics.EpicsSequence;
import org.jlab.detector.scalers.DaqScalersSequence;
import org.jlab.jnp.hipo4.data.*;
import org.jlab.jnp.hipo4.io.HipoReader;
Expand All @@ -17,18 +19,26 @@

public class FilterFcup implements Worker {

Bank runConfigBank = null;
DaqScalersSequence chargeSeq = null;
public final static String FCUP_SCALER = "DSC2";
private Bank runConfigBank = null;
private DaqScalersSequence scalerSeq = null;
private EpicsSequence epicsSeq = null;
private double charge = -1;
private double current = -1;
private String source = null;
private int[] currentBuffer = new int[21];
private int currentMax = 80;

public FilterFcup(double current){
public FilterFcup(double current, String source){
this.current=current;
System.out.println("\nInitializing Faraday Cup reduction: threshold current set to " + this.current + "\n");
this.source=source;
System.out.print("\nInitializing Faraday Cup reduction: threshold current set to " + this.current);
System.out.print("\n current source set to " + (this.source.equals(FCUP_SCALER) ? source : "RAW:epics."+source) + "\n");
}

/**
 * Create a filter using the default Fcup scaler bank (FCUP_SCALER, i.e. "DSC2")
 * as the beam-current source.
 *
 * @param current threshold beam current used to select events
 */
public FilterFcup(double current){
this(current, FCUP_SCALER);
}


/**
* Initialize bank schema
Expand All @@ -46,7 +56,16 @@ public void init(HipoReader reader) {
* @param sequence
*/
public void setScalerSequence(DaqScalersSequence sequence) {
this.chargeSeq=sequence;
this.scalerSeq=sequence;
}

/**
 * Set the sequence of Epics readings, used to look up the beam current
 * when the configured source is an Epics variable rather than the Fcup scaler.
 *
 * @param sequence time-ordered Epics readings for the run
 */
public void setEpicsSequence(EpicsSequence sequence) {
this.epicsSeq=sequence;
}

/**
Expand All @@ -61,9 +80,16 @@ public boolean processEvent(Event event) {

if(runConfigBank.getRows()>0){
long timeStamp = runConfigBank.getLong("timestamp",0);
int unixTime = runConfigBank.getInt("unixtime",0);

// get beam current
double value=chargeSeq.getInterval(timeStamp).getBeamCurrent();
double value=0;
if(source.equals(FCUP_SCALER))
value = scalerSeq.getInterval(timeStamp).getBeamCurrent();
else {
if(epicsSeq.get(unixTime)!=null)
value = epicsSeq.getMinimum(source, 0, unixTime);
}

// fill statistics array
int currentBins = currentBuffer.length-1;
Expand Down Expand Up @@ -92,7 +118,7 @@ public boolean processEvent(Event event) {
* @return
*/
public Map<String,Double> getCurrentMap(){
Map<String,Double> sizeMap = new LinkedHashMap<String,Double>();
Map<String,Double> sizeMap = new LinkedHashMap<>();
int currentBins = currentBuffer.length-1;
double step = ((double) currentMax)/currentBins;
for(int i = 0; i < currentBins; i++){
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -6,20 +6,25 @@

/**
* Hipo Reduce Worker: filter event based on trigger bit
*
* Inputs: selected trigger bit (0-63)
* Returns "true" is selected bit is set in the trigger bit word and no other bits are set

* Inputs: selected and vetoed trigger bit masks (64 bits each)
* Returns "true" if one of the bits in the selectedBits mask is set in the trigger
* word and none of the bits in the vetoedBits mask is set
* @author devita
*/
public class FilterTrigger implements Worker {

Bank triggerBank = null;
DaqScalersSequence chargeSeq = null;
int bit = -1;
long selectedBits = 0L;
long vetoedBits = 0L;

public FilterTrigger(int bit){
this.bit=bit;
System.out.println("\nInitializing trigger reduction: bit set to " + this.bit + "\n");
/**
 * Create a trigger filter from selection and veto bit masks.
 *
 * @param bits mask of trigger bits to select (event kept if any of these bits is set)
 * @param veto mask of trigger bits to veto (event rejected if any of these bits is set)
 */
public FilterTrigger(long bits, long veto){
this.selectedBits=bits;
this.vetoedBits=veto;
System.out.println("\nInitializing trigger reduction:");
System.out.println("\t selected bit mask set to 0x" + Long.toHexString(bits));
System.out.println("\t vetoed bit mask set to 0x" + Long.toHexString(veto));
}

/**
Expand All @@ -43,13 +48,8 @@ public boolean processEvent(Event event) {
event.read(triggerBank);
if(triggerBank.getRows()>0){
long triggerBit = triggerBank.getLong("trigger",0);
long timeStamp = triggerBank.getLong("timestamp",0);
// Value will be >0 if selected bit is 1 in triggerBit
int value = DataByteUtils.readLong(triggerBit, bit, bit);
// Check that no other bit is set
long thisBit = value*((long) Math.pow(2, bit));
// If returned true, the event will be write to the output
if(value>0 && thisBit==triggerBit) return true;
if((triggerBit & selectedBits) !=0L && (triggerBit & vetoedBits) == 0L) return true;
}
return false;
}
Expand Down
Loading