From ddf0354852bb290dac2b85e34348667729d95e27 Mon Sep 17 00:00:00 2001 From: Matt Graham Date: Thu, 31 Oct 2024 15:56:11 -0700 Subject: [PATCH 1/8] add a bunch of files for the nospacing readout; also add KFOutputDriver.java which I use to make comparison plots and should be in master at some point --- ...lDigiWithPulserNoSpacingReadoutDriver.java | 1899 +++++++++++++++++ ...alDigiWithPulseNoSpacingReadoutDriver.java | 153 ++ ...calRawConverterNoSpacingReadoutDriver.java | 153 ++ .../hps/digi/nospacing/EmptyEventsDriver.java | 147 ++ .../nospacing/EmptyEventsReadoutDriver.java | 223 ++ .../GTPClusterNoSpacingReadoutDriver.java | 390 ++++ ...peDigiWithPulseNoSpacingReadoutDriver.java | 224 ++ .../HodoscopePatternNoSpacingDriver.java | 436 ++++ ...opeRawConverterNoSpacingReadoutDriver.java | 78 + .../NoSpacingTriggerDriver.java.donothing | 159 ++ .../RawConverterNoSpacingReadoutDriver.java | 259 +++ ...glesTrigger2019NoSpacingReadoutDriver.java | 415 ++++ ...tDigiWithPulserNoSpacingReadoutDriver.java | 867 ++++++++ .../hps/steering/readout/TestNoSpacing.lcsim | 447 ++++ .../recon/tracking/kalman/KFOutputDriver.java | 1296 +++++++++++ 15 files changed, 7146 insertions(+) create mode 100644 digi/src/main/java/org/hps/digi/nospacing/CalDigiWithPulserNoSpacingReadoutDriver.java create mode 100644 digi/src/main/java/org/hps/digi/nospacing/EcalDigiWithPulseNoSpacingReadoutDriver.java create mode 100755 digi/src/main/java/org/hps/digi/nospacing/EcalRawConverterNoSpacingReadoutDriver.java create mode 100644 digi/src/main/java/org/hps/digi/nospacing/EmptyEventsDriver.java create mode 100644 digi/src/main/java/org/hps/digi/nospacing/EmptyEventsReadoutDriver.java create mode 100755 digi/src/main/java/org/hps/digi/nospacing/GTPClusterNoSpacingReadoutDriver.java create mode 100644 digi/src/main/java/org/hps/digi/nospacing/HodoscopeDigiWithPulseNoSpacingReadoutDriver.java create mode 100644 digi/src/main/java/org/hps/digi/nospacing/HodoscopePatternNoSpacingDriver.java create mode 
100755 digi/src/main/java/org/hps/digi/nospacing/HodoscopeRawConverterNoSpacingReadoutDriver.java create mode 100755 digi/src/main/java/org/hps/digi/nospacing/NoSpacingTriggerDriver.java.donothing create mode 100755 digi/src/main/java/org/hps/digi/nospacing/RawConverterNoSpacingReadoutDriver.java create mode 100644 digi/src/main/java/org/hps/digi/nospacing/SinglesTrigger2019NoSpacingReadoutDriver.java create mode 100755 digi/src/main/java/org/hps/digi/nospacing/SvtDigiWithPulserNoSpacingReadoutDriver.java create mode 100644 steering-files/src/main/resources/org/hps/steering/readout/TestNoSpacing.lcsim create mode 100644 tracking/src/main/java/org/hps/recon/tracking/kalman/KFOutputDriver.java diff --git a/digi/src/main/java/org/hps/digi/nospacing/CalDigiWithPulserNoSpacingReadoutDriver.java b/digi/src/main/java/org/hps/digi/nospacing/CalDigiWithPulserNoSpacingReadoutDriver.java new file mode 100644 index 000000000..9647269d1 --- /dev/null +++ b/digi/src/main/java/org/hps/digi/nospacing/CalDigiWithPulserNoSpacingReadoutDriver.java @@ -0,0 +1,1899 @@ +package org.hps.digi.nospacing; + +import static org.hps.recon.ecal.EcalUtils.fallTime; +import static org.hps.recon.ecal.EcalUtils.maxVolt; +import static org.hps.recon.ecal.EcalUtils.nBit; +import static org.hps.recon.ecal.EcalUtils.riseTime; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; + +import org.hps.readout.ReadoutDriver; +import org.hps.readout.ReadoutDataManager; +import org.hps.readout.ReadoutTimestamp; +import org.hps.readout.util.DoubleRingBuffer; +import org.hps.readout.util.IntegerRingBuffer; +import org.hps.readout.util.ObjectRingBuffer; +import org.hps.readout.util.collection.LCIOCollection; +import org.hps.readout.util.collection.LCIOCollectionFactory; +import org.hps.readout.util.collection.TriggeredLCIOData; +import org.hps.recon.ecal.EcalUtils; +import 
org.hps.util.RandomGaussian; +import org.lcsim.event.CalorimeterHit; +import org.lcsim.event.EventHeader; +import org.lcsim.event.LCRelation; +import org.lcsim.event.MCParticle; +import org.lcsim.event.RawCalorimeterHit; +import org.lcsim.event.RawTrackerHit; +import org.lcsim.event.SimCalorimeterHit; +import org.lcsim.event.base.BaseCalorimeterHit; +import org.lcsim.event.base.BaseLCRelation; +import org.lcsim.event.base.BaseRawCalorimeterHit; +import org.lcsim.event.base.BaseRawTrackerHit; +import org.lcsim.event.base.BaseSimCalorimeterHit; +import org.lcsim.geometry.Detector; +import org.lcsim.geometry.compact.Subdetector; +import org.lcsim.lcio.LCIOConstants; + +/** + * Class DigitizationWithPulserDataMergingReadoutDriver performs digitization + * of truth hits from SLIC by converting them into emulated pulses and merges pulser data, + * and then performing pulse integration. The results are output in + * the form of {@link org.lcsim.event.RawCalorimeterHit + * RawCalorimeterHit} objects. + *

+ * The truth hit information is retained by also producing an output + * collection of {@link org.lcsim.event.LCRelation LCRelation} + * objects linking the raw hits to the original {@link + * org.lcsim.event.SimCalorimeterHit SimCalorimeterHit} objects from + * which they were generated. + *

+ * DigitizationReadoutDriver is itself abstract. It is + * designed with the intent to function for both the hodoscope and + * the calorimeter. As such, it requires its implementing classes to + * handle certain subdetector-specific tasks. + * + * @author Tongtong Cao + */ +public abstract class CalDigiWithPulserNoSpacingReadoutDriver extends ReadoutDriver { + + // ============================================================== + // ==== LCIO Collections ======================================== + // ============================================================== + + /** + * Specifies the name of the subdetector geometry object. + */ + private String geometryName = null; + /** + * The name of the input {@link org.lcsim.event.SimCalorimeterHit + * SimCalorimeterHit} truth hit collection from SLIC. + */ + private String truthHitCollectionName = null; + /** + * The name of the input {@link org.lcsim.event.RawTrackerHit + * RawTrackerHit} collection from pulser data. + */ + private String PulserDataCollectionName = null; + /** + * The name of the digitized output {@link + * org.lcsim.event.RawCalorimeterHit RawCalorimeterHit} + * collection. + */ + private String outputHitCollectionName = null; + /** + * The name of the {@link org.lcsim.event.LCRelation LCRelation} + * collection that links output raw hits to the SLIC truth hits + * from which they were generated. + */ + private String truthRelationsCollectionName = null; + /** + * The name of the {@link org.lcsim.event.LCRelation LCRelation} + * collection that links output raw hits to the SLIC truth hits + * from which they were generated. This collection is output for + * trigger path hits, and is never persisted. + */ + private String triggerTruthRelationsCollectionName = null; + /** + * The name of the collection which contains readout hits. The + * class type of this collection will vary based on which mode + * the simulation is set to emulate. 
+ */ + private String readoutCollectionName = null; + + // ============================================================== + // ==== Driver Options ========================================== + // ============================================================== + + /** + * Indicates whether or not noise should be simulated when + * converting truth energy depositions to the voltage amplitudes. + */ + private boolean addNoise = true; + /** + * Defines the number of photoelectrons per MeV of truth energy + * for the purpose of noise calculation. + */ + private double pePerMeV = Double.NaN; + /** + * Defines a fixed gain to be used for all subdetector channels. + * A negative value will result in gains being pulled from the + * conditions database for the run instead. Units are in MeV/ADC. + */ + private double fixedGain = -1; + /** + * Defines the pulse shape to use when simulating detector truth + * energy deposition response. + */ + private PulseShape pulseShape = PulseShape.ThreePole; + /** + * Defines the pulse time parameter. This influences the shape of + * a pulse generated from truth energy depositions and will vary + * depending on the form of pulse selected. Units are in ns. + */ + private double tp = Double.NaN; + /** + * Defines the ADC threshold needed to initiate pulse integration + * for raw hit creation. + */ + protected int integrationThreshold = 18; + /** + * Defines the number of integration samples that should be + * included in the pulse integral from before the sample that + * exceeds the integration threshold. + */ + protected int numSamplesBefore = 5; + /** + * Defines the number of integration samples that should be + * included in the pulse integral from after the sample that + * exceeds the integration threshold. + * Threshold-crossing sample is part of NSA. + */ + protected int numSamplesAfter = 25; + /** + * The format in which readout hits should be output. 
+ */ + private int mode = 1; + /** + * Specifies whether trigger path hit truth information should be + * included in the driver output. + */ + private boolean writeTriggerTruth = false; + /** + * Specifies whether readout path truth information should be + * included in the driver output. + */ + private boolean writeTruth = false; + + // ============================================================== + // ==== Driver Parameters ======================================= + // ============================================================== + + /** + * Defines the length in nanoseconds of a hardware sample. + */ + private static final double READOUT_PERIOD = 4.0; + /** + * Serves as an internal clock variable for the driver. This is + * used to track the number of clock-cycles (1 per {@link + * org.hps.readout.ecal.updated.DigitizationReadoutDriver#READOUT_PERIOD + * READOUT_PERIOD}). + */ + private int readoutCounter = 0; + /** + * A buffer for storing pulse amplitudes representing the signals + * from the preamplifiers. These are stored in units of Volts + * with no pedestal. One buffer exists for each subdetector + * channel. + */ + private Map voltageBufferMap = new HashMap(); + /** + * Buffers the truth information for each sample period so that + * truth relations can be retained upon readout. + */ + private Map> truthBufferMap = new HashMap>(); + /** + * A buffer for storing ADC values representing the converted + * voltage values from the voltage buffers. These are stored in + * units of ADC and include a pedestal. One buffer exists for + * each subdetector channel. + */ + private Map adcBufferMap = new HashMap(); + + /** + * Stores the subdetector geometry object. + */ + private D geometry = null; + /** + * Stores the total ADC sums for each subdetector channel that is + * currently undergoing integration. 
+ */ + private Map channelIntegrationSumMap = new HashMap(); + /** + * Stores the total ADC sums for each subdetector channel that is + * currently undergoing integration. + */ + private Map> channelIntegrationTruthMap = new HashMap>(); + /** + * Stores the time at which integration began on a given channel. + * This is used to track when the integration period has ended. + */ + private Map channelIntegrationTimeMap = new HashMap(); + // TODO: Give this documentation. + private Map> channelIntegrationADCMap = new HashMap>(); + /** + * Defines the time offset of objects produced by this driver + * from the actual true time that they should appear. + */ + private double localTimeOffset = 0; + /** + * Stores the minimum length of that must pass before a new hit + * may be integrated on a given channel. + * Unit: clock-cycle + */ + private static final int CHANNEL_INTEGRATION_DEADTIME = 8; + //private static final int CHANNEL_INTEGRATION_DEADTIME = 0; + /** + * Defines the total time range around the trigger time in which + * hits are output into the readout LCIO file. The trigger time + * position within this range is determined by {@link + * org.hps.readout.ecal.updated.DigitizationReadoutDriver#readoutOffset + * readoutOffset}. + */ + protected int readoutWindow = 48; + /** + * Sets how far from the beginning of the readout window trigger + * time should occur. A value of x, for instance would result in + * a window that starts at triggerTime - x and + * extends for a total time readoutWindow. 
+ */ + // private int readoutOffset = 0; + + private int readoutOffset = -12; + + /** + * Sets time window of ADC samples in pulser data + */ + protected int pulserDataWindow = 48; + + /** + * To make time alignment between Ecal and hodoscope detectors, samples of + * pulser data may need to be shifted according to readout window offset + * difference between Ecal and hodoscope + */ + private int pulserSamplesShift = 0; + + + private double debugEnergyThresh=0.25; //only print debug for hits>500 MeV + + private boolean debug_=false; + + /** + * Defines the LCSim collection data for the trigger hits that + * are produced by this driver when it is emulating Mode-1 or + * Mode-3. + */ + private LCIOCollection mode13HitCollectionParams; + /** + * Defines the LCSim collection data for the trigger hits that + * are produced by this driver when it is emulating Mode-7. + */ + private LCIOCollection mode7HitCollectionParams; + /** + * Defines the LCSim collection data that links SLIC truth hits + * to the their corresponding simulated output hit. + */ + private LCIOCollection truthRelationsCollectionParams; + + /** + * Flag to point out that new integration could be started at a sample + * between CHANNEL_INTEGRATION_DEADTIME and numSamplesAfter + * for the case is less than numSamplesAfter + */ + private Map flagStartNewIntegration = new HashMap<>(); + + /** + * Since new integration could happen between CHANNEL_INTEGRATION_DEADTIME and numSamplesAfter, + * integration time needs to be assigned as parameter of ReadoutDataManager.addData(). + * Global displacement is 0 for dependency. + */ + private double integrationTime = Double.NaN; + + + // ============================================================== + // ==== To Be Re-Worked ========================================= + // ============================================================== + // TODO: We should be able to define these based on the integration parameters. 
+ private static final int BUFFER_LENGTH = 100; + private static final int PIPELINE_LENGTH = 2000; + + @Override + public void startOfData() { + // Validate that all the collection names are defined. + if(truthHitCollectionName == null || PulserDataCollectionName == null || outputHitCollectionName == null || truthRelationsCollectionName == null + || triggerTruthRelationsCollectionName == null || readoutCollectionName == null) { + throw new RuntimeException("One or more collection names is not defined!"); + } + + // Calculate the correct time offset. This is a function of + // the integration samples and the output delay. + // Threshold-crossing sample is part of NSA. + // localTimeOffset = 4 * numSamplesAfter; + localTimeOffset = 0; + + // Validate that a real mode was selected. + if(mode != 1 && mode != 3 && mode != 7) { + throw new IllegalArgumentException("Error: Mode " + mode + " is not a supported output mode."); + } + + // Add the driver dependencies. + addDependency(truthHitCollectionName); + addDependency(PulserDataCollectionName); + + // Define the LCSim collection parameters for this driver's + // output. Note: Since these are not persisted, the flags and + // readout name are probably not necessary. 
+ LCIOCollectionFactory.setCollectionName(outputHitCollectionName); + LCIOCollectionFactory.setProductionDriver(this); + LCIOCollectionFactory.setFlags((0 + (1 << LCIOConstants.CHBIT_LONG) + (1 << LCIOConstants.RCHBIT_ID1))); + LCIOCollectionFactory.setReadoutName(truthHitCollectionName); + LCIOCollection hitCollectionParams = LCIOCollectionFactory.produceLCIOCollection(RawCalorimeterHit.class); + ReadoutDataManager.registerCollection(hitCollectionParams, false); + + LCIOCollectionFactory.setCollectionName(triggerTruthRelationsCollectionName); + LCIOCollectionFactory.setProductionDriver(this); + LCIOCollection triggerTruthCollectionParams = LCIOCollectionFactory.produceLCIOCollection(LCRelation.class); + ReadoutDataManager.registerCollection(triggerTruthCollectionParams, false); + + // Define the LCSim collection data for the on-trigger output. + LCIOCollectionFactory.setCollectionName(readoutCollectionName); + LCIOCollectionFactory.setProductionDriver(this); + mode13HitCollectionParams = LCIOCollectionFactory.produceLCIOCollection(RawTrackerHit.class); + + LCIOCollectionFactory.setCollectionName(readoutCollectionName); + LCIOCollectionFactory.setProductionDriver(this); + LCIOCollectionFactory.setFlags(1 << LCIOConstants.RCHBIT_TIME); + mode7HitCollectionParams = LCIOCollectionFactory.produceLCIOCollection(RawCalorimeterHit.class); + + LCIOCollectionFactory.setCollectionName(truthRelationsCollectionName); + LCIOCollectionFactory.setProductionDriver(this); + truthRelationsCollectionParams = LCIOCollectionFactory.produceLCIOCollection(LCRelation.class); + + // Run the superclass method. + super.startOfData(); + } + + @SuppressWarnings("unchecked") + @Override + public void detectorChanged(Detector detector) { + // Throw an error if the geometry name is not set. + if(geometryName == null) { + throw new RuntimeException("Subdetector name is not defined!"); + } + + // Get the readout name from the subdetector geometry data. 
+ geometry = (D) detector.getSubdetector(geometryName); + + // Update the output LCIO collections data. + LCIOCollectionFactory.setReadoutName(geometry.getReadout().getName()); + mode13HitCollectionParams = LCIOCollectionFactory.cloneCollection(mode13HitCollectionParams); + LCIOCollectionFactory.setReadoutName(geometry.getReadout().getName()); + mode7HitCollectionParams = LCIOCollectionFactory.cloneCollection(mode7HitCollectionParams); + + // Reinstantiate the buffers. + resetBuffers(); + } + + @Override + public void process(EventHeader event) { + + /* + * Get current SLIC hits and current raw hits in pulser data. + */ + + // Get current SLIC hits. + Collection hits = ReadoutDataManager.getData(ReadoutDataManager.getCurrentTime(), ReadoutDataManager.getCurrentTime() + 2.0, + truthHitCollectionName, SimCalorimeterHit.class); + + // Get current raw hits in pulser data. + Collection rawHits = ReadoutDataManager.getData(ReadoutDataManager.getCurrentTime(), ReadoutDataManager.getCurrentTime() + 2.0, + PulserDataCollectionName, RawTrackerHit.class); + if(debug_)System.out.println("DigiReadout:: "+truthHitCollectionName +" local time = "+ReadoutDataManager.getCurrentTime()+" number of hits = "+hits.size()); + // Once an overlaid event is input, reset adcBufferMap to ensure that other overlaid events do not affect the current event. + if(hits.size()!=0 || rawHits.size()!=0) { + // Get the set of all possible channel IDs. + Set cells = getChannelIDs(); + + // Reset adcBufferMap. + for(Long cellID : cells) + adcBufferMap.get(cellID).setAll((int) Math.round(getPedestalConditions(cellID))); + } + + /* To merge MC data with pulser data, three different cases are handled separately. 
+ * Case 1: If pulser data does not have a channel in MC data, directly buffer samples + * + * Case 2: If MC data does not have a channel in pulser data, + * 1) add noise into MC hits + * 2) convert MC hits into a window of ADC samples + * 3) add pedestal + * 4) buffer samples + * + * Case 3: If MC data has a channel that is also in pulser data, + * 1) convert MC hits into a window of ADC samples + * 2) merge with samples of pulser data + * 3) buffer merged samples + * + * MC hits are digitized into ADC samples with the same time window of pulser data. + * Before the time window, the window is extended with NSB ADC samples, and values of the ADC samples are set as pedestal. + * After the time window, the window is extended with NSA ADC samples, and values of the ADC samples are set as pedestal. + * The extension is allowed since enough empty events are inserted into neighbored overlaid events. + */ + + // Add the truth hits to the truth hit buffer. The buffer is + // only incremented when the ADC buffer is incremented, which + // is handled below. + // Save cell IDs of hits as keys in the MC hit Cell ID hash map, and set values as 1. + // The hash map is used to check if MC data has a channel that is also in pulser data. + Map hitCellIDMap = new HashMap(hits.size()); + for(SimCalorimeterHit hit : hits) { + if(debug_) + System.out.println(this.getClass().getName()+":: process:: sim hit energy = "+hit.getRawEnergy()); + // Store the truth data. + Long hitCellID = hit.getCellID(); // For Ecal, cell ID is geometry ID; For hodo, cell ID is channel ID after hodoscope preprocessing + + ObjectRingBuffer hitBuffer = truthBufferMap.get(hitCellID); + hitBuffer.addToCell(0, hit); + + // Save cell IDs of hits as keys in the hit Cell ID hash map, and set values as 1. + if(hitCellIDMap.get(hitCellID) == null) + hitCellIDMap.put(hitCellID,1); + } + + // handle pulser data: case 1. 
+ // If cellID of a raw hit is not included by keys in the MC hit Cell ID hash map for MC hits, directly buffer ADC samples. + // If included, set value as 2 for the corresponding key in the MC hit Cell ID hash map. + // Save raw hits in the raw hit hash map, where keys are raw hit cell IDs and values are raw hits. + // The hash map is used for case 3 + Map rawHitsMap = new HashMap(rawHits.size()); + for(RawTrackerHit rawHit : rawHits) { + Long rawHitID = getID(rawHit); // For Ecal, ID is geometry ID; For hodo, ID is channel ID, which is converted from geometry ID. + if(hitCellIDMap.get(rawHitID) == null) { + // Get the ADC buffer for the channel. + IntegerRingBuffer adcBuffer = adcBufferMap.get(rawHitID); + + // Get ADC samples for the channel. + short[] adcSamples = rawHit.getADCValues(); + + // Length of ADC sample array should be equal to setup for time window of ADC samples + if(adcSamples.length != pulserDataWindow) + throw new RuntimeException("Error: time window of pulser data is not correctly set."); + + // Buffer ADC samples in pulser data + for(int i = 0; i < pulserDataWindow; i++) + adcBuffer.setValue(i - pulserSamplesShift, (int)adcSamples[i]); + } + else { + hitCellIDMap.put(rawHitID, 2); + rawHitsMap.put(rawHitID, rawHit); + } + } + + // handle MC hits: case 2 and case 3 + // In the MC hit Cell ID hash map, if value for cell ID of a MC hit is 1, handle the hit as case 2. + // If value for cell ID of a MC hit is 2, handle the hit as case 3. + for(SimCalorimeterHit hit : hits) { + Long hitCellID = hit.getCellID(); + // Check to see if the hit time seems valid. This is done + // by calculating the time of the next readout cycle in + // ns and subtracting the time of the current hit (with + // adjustment for simulation time passed) from it. If the + // hit would fall in a previous readout cycle, something + // is probably wrong. 
+ // if(READOUT_PERIOD + readoutTime() - (ReadoutDataManager.getCurrentTime() + hit.getTime()) >= READOUT_PERIOD) { + if(READOUT_PERIOD - hit.getTime() >= READOUT_PERIOD) { + throw new RuntimeException("Error: Trying to add a hit to the analog pipeline, but the time seems incorrect."); + } + + // Get the ADC buffer for the channel. + IntegerRingBuffer adcBuffer = adcBufferMap.get(hitCellID); + + // Get the pedestal for the channel. + int pedestal = (int) Math.round(getPedestalConditions(hitCellID)); + + // Get the buffer for the current truth hit's channel. + DoubleRingBuffer voltageBuffer = voltageBufferMap.get(hitCellID); + + // Get the truth hit energy deposition. + double energyAmplitude = hit.getRawEnergy(); + if(energyAmplitude>debugEnergyThresh && debug_){ + System.out.println(this.getClass().getName()+":: process:: Putting sim hits in adcBuffer cellID = "+hitCellID); + System.out.println(this.getClass().getName()+":: process:: adding hits to adcBuffer cellID = "+hitCellID); + + System.out.println(this.getClass().getName()+":: process:: ReadoutDataManager Time = "+ReadoutDataManager.getCurrentTime()); + System.out.println(this.getClass().getName()+":: process:: hit time = "+hit.getTime()); + System.out.println(this.getClass().getName()+":: process:: readouttime() = "+readoutTime()); + + + System.out.println(this.getClass().getName()+":: process:: truth energy = "+energyAmplitude); + } + if(hitCellIDMap.get(hitCellID) == 1) { + // If noise should be added, calculate a random value for + // the noise and add it to the truth energy deposition. + if(addNoise) { + energyAmplitude += getAmplitudeFluctuation(hit); + if(energyAmplitude>debugEnergyThresh&&debug_) + System.out.println(this.getClass().getName()+":: process:: added noise to energy; new energy = "+energyAmplitude); + } + + // Simulate the pulse for each position in the preamp + // pulse buffer for the subdetector channel on which the + // hit occurred. 
+ if(energyAmplitude>debugEnergyThresh&&debug_) + System.out.println(this.getClass().getName()+":: process:: making pulse"); + for(int i = 0; i < pulserDataWindow; i++) { + // Calculate the voltage deposition for the current + // buffer time. + //double voltageDeposition = energyAmplitude * pulseAmplitude((i + 1) * READOUT_PERIOD + readoutTime() + // - (ReadoutDataManager.getCurrentTime() + hit.getTime()) - getTimeShiftConditions(hitCellID), hitCellID); + + double voltageDeposition = energyAmplitude * pulseAmplitude((i + 1) * READOUT_PERIOD + - hit.getTime() + - getTimeShiftConditions(hitCellID) + , hitCellID); + + if(energyAmplitude>debugEnergyThresh&&debug_){ + System.out.println(this.getClass().getName()+":: process:: pulse sample i = "+i + +" local time = "+((i + 1) * READOUT_PERIOD - hit.getTime() - getTimeShiftConditions(hitCellID)) + +" pulse amplitude = "+pulseAmplitude((i + 1) * READOUT_PERIOD + - hit.getTime() + - getTimeShiftConditions(hitCellID) + , hitCellID)); + System.out.println(this.getClass().getName()+":: process:: writing digitized value for sample = "+i + +" voltage dep value = "+voltageDeposition); + } + // Increase the current buffer time's voltage value + // by the calculated amount. + voltageBuffer.addToCell(i, voltageDeposition); + + // Scale the current value of the preamplifier buffer + // to a 12-bit ADC value where the maximum represents + // a value of maxVolt. + double currentValue = voltageBuffer.getValue(i) * ((Math.pow(2, nBit) - 1) / maxVolt); + if(energyAmplitude>debugEnergyThresh&&debug_){ + System.out.println(this.getClass().getName()+":: process:: writing digitized value for sample = "+i + +" pre-noise digitized value = "+currentValue); + } + // If noise should be added, calculate a random value for + // the noise and add it to the ADC value. 
+ if(addNoise) { + double sigma = getNoiseConditions(hitCellID); + currentValue += RandomGaussian.getGaussian(0, sigma); + } + if(energyAmplitude>debugEnergyThresh&&debug_) + System.out.println(this.getClass().getName()+":: process:: writing digitized value for sample = "+i + +" post-noise current value = "+currentValue); + // If noise should be added, calculate a random value for + // An ADC value is not allowed to exceed 4095. If a + // larger value is observed, 4096 (overflow) is given + // instead. (This corresponds to >2 Volts.) + int digitizedValue = Math.min((int) Math.round(pedestal + currentValue), (int) Math.pow(2, nBit)); + if(energyAmplitude>debugEnergyThresh&&debug_) + System.out.println(this.getClass().getName()+":: process:: writing digitized value for sample = "+i + +" digitized value = "+digitizedValue); + + // Write this value to the ADC buffer. + adcBuffer.setValue(i, digitizedValue); + } + } + + else { + // Get ADC samples for the channel. + short[] ADCSamples = rawHitsMap.get(hitCellID).getADCValues(); + + // Get digitized samples for MC hits + int[] digitizedValue = new int[pulserDataWindow]; + + // Simulate the pulse for each position in the preamp + // pulse buffer for the subdetector channel on which the + // hit occurred. + + for(int i = 0; i < pulserDataWindow; i++) { + // Calculate the voltage deposition for the current + // buffer time. + double voltageDeposition = energyAmplitude * pulseAmplitude((i + 1) * READOUT_PERIOD + - hit.getTime() - getTimeShiftConditions(hitCellID), hitCellID); + + // double voltageDeposition = energyAmplitude * pulseAmplitude((i + 1) * READOUT_PERIOD + readoutTime() + // - (ReadoutDataManager.getCurrentTime() + hit.getTime()) - getTimeShiftConditions(hitCellID), hitCellID); + + // Increase the current buffer time's voltage value + // by the calculated amount. 
+ voltageBuffer.addToCell(i, voltageDeposition); + + // Scale the current value of the preamplifier buffer + // to a 12-bit ADC value where the maximum represents + // a value of maxVolt. + double currentValue = voltageBuffer.getValue(i) * ((Math.pow(2, nBit) - 1) / maxVolt); + + // An ADC value is not allowed to exceed 4095. If a + // larger value is observed, 4096 (overflow) is given + // instead. (This corresponds to >2 Volts.) + digitizedValue[i] = Math.min((int) Math.round(currentValue), (int) Math.pow(2, nBit)); + } + + // Write this value to the ADC buffer. + // If pulserSamplesShift is larger than 0, merged sample window is [-pulserSamplesShift, pulserDataWindow] + if(pulserSamplesShift >= 0) { + for(int i = -pulserSamplesShift; i < 0; i++) adcBuffer.setValue(i , (int)ADCSamples[i + pulserSamplesShift]); + for(int i = 0; i < pulserDataWindow - pulserSamplesShift; i++) adcBuffer.setValue(i, digitizedValue[i] + ADCSamples[i + pulserSamplesShift]); + for(int i = pulserDataWindow - pulserSamplesShift; i < pulserDataWindow; i++) adcBuffer.setValue(i, digitizedValue[i]); + } + // If pulserSamplesShift is less than 0, merged sample window is [0, -pulserSamplesShift + pulserDataWindow] + else { + for(int i = 0; i < -pulserSamplesShift; i++) adcBuffer.setValue(i, digitizedValue[i]); + for(int i = -pulserSamplesShift; i < pulserDataWindow; i++) adcBuffer.setValue(i, digitizedValue[i] + ADCSamples[i + pulserSamplesShift]); + for(int i = pulserDataWindow; i < pulserDataWindow - pulserSamplesShift; i++) adcBuffer.setValue(i, (int)ADCSamples[i + pulserSamplesShift]); + } + } + } + + /* + * Next step is to integrate hits from the pulses. Hit + * integration is only performed once per readout period. The + * readout period, defined by the hardware, is by default 4 + * nanoseconds. + */ + + // Check whether the appropriate amount of time has passed to + // perform another integration step. 
If so, create a list to + // contain any newly integrated hits and perform integration. + List newHits = null; + List newTruthRelations = null; + if(newHits == null) { newHits = new ArrayList(); } + if(newTruthRelations == null) { newTruthRelations = new ArrayList(); } + readoutCounter=0; + for(int i = 0; i < pulserDataWindow; i++){ + // System.out.println(this.getClass().getName()+":: looping over pulse data window readoutCounter = "+readoutCounter); + readHits(newHits, newTruthRelations); + readoutCounter++; + } + } + + // TODO: Document this. + private void readHits(List newHits, List newTruthRelations) { + // Perform hit integration as needed for each subdetector + // channel in the buffer map. + for(Long cellID : adcBufferMap.keySet()) { + // System.out.println("************** new channel ***************"); + // Get the ADC buffer for the channel. + IntegerRingBuffer adcBuffer = adcBufferMap.get(cellID); + + // Get the pedestal for the channel. + int pedestal = (int) Math.round(getPedestalConditions(cellID)); + + // Store the pedestal subtracted value so that it may + // be checked against the integration threshold. + int pedestalSubtractedValue = adcBuffer.getValue() - pedestal; + if(pedestalSubtractedValue > integrationThreshold && debug_){ + System.out.println(this.getClass().getName()+":: readHits:: Looping over adcBufferMap cellID = "+cellID); + System.out.println(this.getClass().getName()+":: readHits:: ped subtracted ADC counts = "+pedestalSubtractedValue); + } + // Get the total ADC value that has been integrated + // on this channel. + Integer sum = channelIntegrationSumMap.get(cellID); + if(pedestalSubtractedValue >integrationThreshold && debug_) + System.out.println(this.getClass().getName()+":: readHits:: sum = "+sum); + // If any readout hits exist on this channel, add the + // current ADC values to them. + + // If the ADC sum is undefined, then there is not an + // ongoing integration. 
If the pedestal subtracted + // value is also over the integration threshold, then + // integration should be initiated. + if(sum == null && pedestalSubtractedValue > integrationThreshold) { + // Store the current local time in units of + // events (4 ns). This will indicate when the + // integration started and, in turn, should end. + if(debug_)System.out.println(this.getClass().getName()+":: readHits:: Found a hit above threshold = "+cellID); + + channelIntegrationTimeMap.put(cellID, readoutCounter); + + // Integrate the ADC values for a number of + // samples defined by NSB and threshold + // crossing sample. + int sumBefore = 0; + for(int i = 0; i <= numSamplesBefore; i++) { + sumBefore += adcBuffer.getValue(-(numSamplesBefore - i)); + } + if(debug_)System.out.println(this.getClass().getName()+":: readHits:: sum before this sample = "+sumBefore); + // This will represent the total integral sum at + // the current point in time. Store it in the sum + // buffer so that it may be incremented later as + // additional samples are read. + channelIntegrationSumMap.put(cellID, sumBefore); + + // Collect and store truth information for trigger + // path hits. + channelIntegrationADCMap.put(cellID, new ArrayList()); + + // Get the truth information in the + // integration samples for this channel. + Set truthHits = new HashSet(); + for(int i = 0; i < numSamplesBefore + 4; i++) { + channelIntegrationADCMap.get(cellID).add(adcBuffer.getValue(-(numSamplesBefore - i))); + truthHits.addAll(truthBufferMap.get(cellID).getValue(-(numSamplesBefore - i))); + } + + // Store all the truth hits that occurred in + // the truth buffer in the integration period + // for this channel as well. These will be + // passed through the chain to allow for the + // accessing of truth information during the + // trigger simulation. + channelIntegrationTruthMap.put(cellID, truthHits); + } + + // If the integration sum is defined, then pulse + // integration is ongoing. 
+ if(sum != null) { + if(debug_)System.out.println(this.getClass().getName()+":: readHits:: integration is ongoing..."); + // Three cases are treated separataly + // Case 1: CHANNEL_INTEGRATION_DEADTIME > numSamplesAfter + // Case 2: CHANNEL_INTEGRATION_DEADTIME == numSamplesAfter + // Case 3: CHANNEL_INTEGRATION_DEADTIME < numSamplesAfter + if(CHANNEL_INTEGRATION_DEADTIME > numSamplesAfter) { // Case 1 + if(debug_)System.out.println(this.getClass().getName()+":: readHits:: case 1: DEADTIME>NSA"); + //Continue integration until NSA, the threshold-crossing sample has been added before. + if (channelIntegrationTimeMap.get(cellID) + numSamplesAfter - 1 >= readoutCounter) { + channelIntegrationADCMap.get(cellID).add(adcBuffer.getValue(0)); + + // Add the new ADC sample. + channelIntegrationSumMap.put(cellID, sum + adcBuffer.getValue(0)); + + // Add the new truth information, if trigger + // path truth output is enabled. + if (writeTriggerTruth) { + channelIntegrationTruthMap.get(cellID).addAll(truthBufferMap.get(cellID).getValue(0)); + } + } + + // If integration is complete, a hit may be added + // to data manager. + else if (channelIntegrationTimeMap.get(cellID) + numSamplesAfter - 1 == readoutCounter - 1) {//At NSA + 1, hit is added into data manager + // Add a new calorimeter hit. + RawCalorimeterHit newHit = new BaseRawCalorimeterHit(cellID, sum, + 64 * channelIntegrationTimeMap.get(cellID)); + newHits.add(newHit); + // Cycle-clock for events is 2 ns, while cycle-clock for samples is 4 ns + integrationTime = channelIntegrationTimeMap.get(cellID) * 4 + 2; + // Add the truth relations for this hit, if + // trigger path truth is enabled. + if (writeTriggerTruth) { + Set truthHits = channelIntegrationTruthMap.get(cellID); + for (SimCalorimeterHit truthHit : truthHits) { + newTruthRelations.add(new BaseLCRelation(newHit, truthHit)); + } + } + } + + // Do not clear the channel for integration until deadtime has passed. 
+ // The threshold-crossing sample counts as the first sample in the deadtime. + else if (channelIntegrationTimeMap.get(cellID) + CHANNEL_INTEGRATION_DEADTIME - 1 <= readoutCounter + - 1) { // No new integration until over deadtime + channelIntegrationSumMap.remove(cellID); + } + } // Case 1 ends + else if(CHANNEL_INTEGRATION_DEADTIME == numSamplesAfter){ // Case 2 + if(debug_)System.out.println(this.getClass().getName()+":: readHits:: case2: DEADTIME==NSA"); + // Continue integration until NSA, the threshold-crossing sample has been added before. + if (channelIntegrationTimeMap.get(cellID) + numSamplesAfter - 1 >= readoutCounter) { + channelIntegrationADCMap.get(cellID).add(adcBuffer.getValue(0)); + + // Add the new ADC sample. + channelIntegrationSumMap.put(cellID, sum + adcBuffer.getValue(0)); + + // Add the new truth information, if trigger + // path truth output is enabled. + if (writeTriggerTruth) { + channelIntegrationTruthMap.get(cellID).addAll(truthBufferMap.get(cellID).getValue(0)); + } + } + // If integration is complete, a hit may be added + // to data manager. + else if (channelIntegrationTimeMap.get(cellID) + numSamplesAfter - 1 == readoutCounter - 1) {//At NSA + 1, hit is added into data manager + // Add a new calorimeter hit. + RawCalorimeterHit newHit = new BaseRawCalorimeterHit(cellID, sum, + 64 * channelIntegrationTimeMap.get(cellID)); + newHits.add(newHit); + // Cycle-clock for events is 2 ns, while cycle-clock for samples is 4 ns + integrationTime = channelIntegrationTimeMap.get(cellID) * 4 + 2; + + // Add the truth relations for this hit, if + // trigger path truth is enabled. 
+ if (writeTriggerTruth) { + Set truthHits = channelIntegrationTruthMap.get(cellID); + for (SimCalorimeterHit truthHit : truthHits) { + newTruthRelations.add(new BaseLCRelation(newHit, truthHit)); + } + } + channelIntegrationSumMap.remove(cellID); + } + } // Case 2 ends + else { // Case 3 + if(debug_)System.out.println(this.getClass().getName()+":: readHits:: case 3: DEADTIME= readoutCounter) { + if(debug_)System.out.println(this.getClass().getName()+":: readHits:: case 3: time + deadtime - 1 >= readoutCounter"+(channelIntegrationTimeMap.get(cellID) + CHANNEL_INTEGRATION_DEADTIME - 1)+">="+readoutCounter+"....just keep integrating "+cellID); + // Continue integration until CHANNEL_INTEGRATION_DEADTIME + channelIntegrationADCMap.get(cellID).add(adcBuffer.getValue(0)); + + // Add the new ADC sample. + channelIntegrationSumMap.put(cellID, sum + adcBuffer.getValue(0)); + + // Add the new truth information, if trigger + // path truth output is enabled. + if (writeTriggerTruth) { + channelIntegrationTruthMap.get(cellID).addAll(truthBufferMap.get(cellID).getValue(0)); + } + + // If sample at the end of deadtime is less than threshold, new integration could be started from next sample + if(channelIntegrationTimeMap.get(cellID) + CHANNEL_INTEGRATION_DEADTIME == readoutCounter && pedestalSubtractedValue <= integrationThreshold){ + if(debug_)System.out.println(this.getClass().getName()+":: readHits:: case 3: time + deadtime - 1 >= readoutCounter ... 
at deadtime limit and below threshold, setting new integration flag to true"); + flagStartNewIntegration.put(cellID, true); + } + } + else if (channelIntegrationTimeMap.get(cellID) + numSamplesAfter - 1 >= readoutCounter) { + if(debug_)System.out.println(this.getClass().getName()+":: readHits:: case 3: time + NSA - 1 >= readoutCounter"+(channelIntegrationTimeMap.get(cellID) + numSamplesAfter - 1)+">="+readoutCounter+"....decide what to do "+cellID); + if(flagStartNewIntegration.get(cellID) == true) { // Flag for previous sample is true + if(debug_)System.out.println(this.getClass().getName()+":: readHits:: case 3: new integration flag is true; starting new integration "+cellID); + if(pedestalSubtractedValue <= integrationThreshold) { // If sample is less than threshold, then do not start new integration + channelIntegrationADCMap.get(cellID).add(adcBuffer.getValue(0)); + + // Add the new ADC sample. + channelIntegrationSumMap.put(cellID, sum + adcBuffer.getValue(0)); + + // Add the new truth information, if trigger + // path truth output is enabled. + if (writeTriggerTruth) { + channelIntegrationTruthMap.get(cellID).addAll(truthBufferMap.get(cellID).getValue(0)); + } + } + else { // if sample is larger than threshold, a hit is added into data manager and start new integration + // Add a new calorimeter hit. + if(debug_)System.out.println(this.getClass().getName()+":: readHits:: case 3: making new hit after new integration flag is true because sample is over threshold and new integration is starting!!! "+cellID); + RawCalorimeterHit newHit = new BaseRawCalorimeterHit(cellID, sum, + 64 * channelIntegrationTimeMap.get(cellID)); + newHits.add(newHit); + integrationTime = channelIntegrationTimeMap.get(cellID) * 4 + 2; + + // Add the truth relations for this hit, if + // trigger path truth is enabled. 
+ if (writeTriggerTruth) { + Set truthHits = channelIntegrationTruthMap.get(cellID); + for (SimCalorimeterHit truthHit : truthHits) { + newTruthRelations.add(new BaseLCRelation(newHit, truthHit)); + } + } + + //Start new integration + channelIntegrationTimeMap.put(cellID, readoutCounter); + flagStartNewIntegration.put(cellID, false); + + // Integrate the ADC values for a number of + // samples defined by NSB from before threshold + // crossing. Note that this stops one sample + // before the current sample. This current sample + // is handled in the subsequent code block. + int sumBefore = 0; + for(int i = 0; i <= numSamplesBefore; i++) { + sumBefore += adcBuffer.getValue(-(numSamplesBefore - i)); + } + + // This will represent the total integral sum at + // the current point in time. Store it in the sum + // buffer so that it may be incremented later as + // additional samples are read. + channelIntegrationSumMap.put(cellID, sumBefore); + + // Collect and store truth information for trigger + // path hits. + channelIntegrationADCMap.put(cellID, new ArrayList()); + + // Get the truth information in the + // integration samples for this channel. + Set truthHits = new HashSet(); + for(int i = 0; i < numSamplesBefore + 4; i++) { + channelIntegrationADCMap.get(cellID).add(adcBuffer.getValue(-(numSamplesBefore - i))); + truthHits.addAll(truthBufferMap.get(cellID).getValue(-(numSamplesBefore - i))); + } + + // Store all the truth hits that occurred in + // the truth buffer in the integration period + // for this channel as well. These will be + // passed through the chain to allow for the + // accessing of truth information during the + // trigger simulation. 
+ channelIntegrationTruthMap.put(cellID, truthHits); + } + } + else { // Flag for previous sample is false + if(debug_)System.out.println(this.getClass().getName()+":: readHits:: case 3: new integration flag is false; just add new sample "+cellID); + channelIntegrationADCMap.get(cellID).add(adcBuffer.getValue(0)); + + // Add the new ADC sample. + channelIntegrationSumMap.put(cellID, sum + adcBuffer.getValue(0)); + if(debug_)System.out.println(this.getClass().getName()+":: readHits:: case 3: new integration flag is false: integration sum = "+(sum + adcBuffer.getValue(0))); + // Add the new truth information, if trigger + // path truth output is enabled. + if (writeTriggerTruth) { + channelIntegrationTruthMap.get(cellID).addAll(truthBufferMap.get(cellID).getValue(0)); + } + if(pedestalSubtractedValue <= integrationThreshold){ + if(debug_)System.out.println(this.getClass().getName()+":: readHits:: case 3: new integration flag is false: went below threshold, setting flag to true"); + flagStartNewIntegration.put(cellID, true); + } + } + } + else if (channelIntegrationTimeMap.get(cellID) + numSamplesAfter - 1 == readoutCounter - 1) {//If reach NSA + 1, hit is added into data manager, and flag is set as false + if(debug_)System.out.println(this.getClass().getName()+":: readHits:: case 3: reached NSA + 1; adding new hit "+cellID); + // Add a new calorimeter hit. + RawCalorimeterHit newHit = new BaseRawCalorimeterHit(cellID, sum, + 64 * channelIntegrationTimeMap.get(cellID)); + newHits.add(newHit); + integrationTime = channelIntegrationTimeMap.get(cellID) * 4 + 2; + + // Add the truth relations for this hit, if + // trigger path truth is enabled. 
+ if (writeTriggerTruth) { + Set truthHits = channelIntegrationTruthMap.get(cellID); + for (SimCalorimeterHit truthHit : truthHits) { + newTruthRelations.add(new BaseLCRelation(newHit, truthHit)); + } + } + channelIntegrationSumMap.remove(cellID); + flagStartNewIntegration.put(cellID, false); + } + } // Case 3 ends + } + + // Step to the next entry in the adc buffer. + adcBuffer.stepForward(); + + // Step to the next entry in the voltage buffer. + if(voltageBufferMap.get(cellID) != null) { // A channel could be from pulser data, while MC data has no such channel. + DoubleRingBuffer voltageBuffer = voltageBufferMap.get(cellID); + voltageBuffer.clearValue(); + voltageBuffer.stepForward(); + } + + // Step the truth buffer for this channel forward. + // The new cell should be cleared of any old values. + if(truthBufferMap.get(cellID) != null) { // A channel could be from pulser data, while MC data has no such channel. + truthBufferMap.get(cellID).stepForward(); + truthBufferMap.get(cellID).clearValue(); + } + } + + // Write the trigger path output data to the readout data + // manager. Truth data is optional. + + if(debug_)System.out.println("DigiReadout:: "+ outputHitCollectionName+" local time = "+(ReadoutDataManager.getCurrentTime()+readoutTime())+" adding trigger hits = "+newHits.size()); + ReadoutDataManager.addData(outputHitCollectionName, ReadoutDataManager.getCurrentTime()+readoutTime(), newHits, RawCalorimeterHit.class); + newHits.clear(); //remove newHits since we've already put it in data manager + if(writeTriggerTruth) { + ReadoutDataManager.addData(triggerTruthRelationsCollectionName, integrationTime, newTruthRelations, LCRelation.class); + newTruthRelations.clear(); + } + + } + + /** + * Finds all root particles associated with the interactions that + * created the argument particle. + * @param particle - The particle. 
+ * @return Returns a {@link java.util.List List} containing each + * particle object in the argument particle's particle tree which + * has no parent particle. + */ + private static final List getRootParticleList(MCParticle particle) { + // If the particle has no parents, it should be added to the + // list and the list returned. + if(particle.getParents().isEmpty()) { + List list = new ArrayList(1); + list.add(particle); + return list; + } + + // If there is only one parent, just return the results from + // that parent. + else if(particle.getParents().size() == 1) { + return getRootParticleList(particle.getParents().get(0)); + } + + // Otherwise, run the method on each parent particle and + // return the results from that instead. + else { + // Store the parent particle roots. + List list = new ArrayList(); + + // Get the root particles for each parent and add them to + // the list. + for(MCParticle parent : particle.getParents()) { + List parentParticles = getRootParticleList(parent); + list.addAll(parentParticles); + } + + // Return the compiled particle list. + return list; + } + } + + /** + * Flattens the particle tree to a set containing both the root + * particle and any particles that are descended from it. + * @param root - The root of the particle tree. + * @return Returns a set containing the argument particle and all + * of its descendants. + */ + private static final Set getParticleTreeAsSet(MCParticle root) { + // Create a set to store the particle tree. + Set particleSet = new HashSet(); + + // Add the root particle to the tree, and then recursively + // add any daughter particles to the tree. + particleSet.add(root); + addDaughtersToSet(root, particleSet); + + // Return the particle set. + return particleSet; + } + + /** + * Adds all the daughter particles of the argument to the set. + * Daughters of each daughter particle are then recursively added + * to the set as well. + * @param particle - The particle to add. 
+ * @param set - The set to which to add the particle. + */ + private static final void addDaughtersToSet(MCParticle particle, Set set) { + // Add each daughter particle to the set, and recursively add + // its daughters as well. + for(MCParticle daughter : particle.getDaughters()) { + set.add(daughter); + addDaughtersToSet(daughter, set); + } + } + + /** + * Gets a {@link java.util.Set Set} containing all valid channel + * IDs for the relevant subdetector geometry. + * @return Returns a Set containing all possible + * channel IDs. + */ + protected abstract Set getChannelIDs(); + + /** + * Gets a channel ID through {@link org.lcsim.event.RawTrackerHit RawTrackerHit} + * @return Returns a ID. Return a geometry ID for Ecal, while return a channel ID for hodoscope + */ + protected abstract Long getID(RawTrackerHit hit); + + /** + * Gets the gain for the indicated subdetector channel. + * @param channelID - The channel ID. + * @return Returns the value of the gain in units of ADC/MeV as a + * double. + */ + protected abstract double getGainConditions(long channelID); + + /** + * Gets the noise sigma for the indicated subdetector channel. + * @param channelID - The channel ID. + * @return Returns the value of the noise sigma as a + * double. + */ + protected abstract double getNoiseConditions(long channelID); + + /** + * Gets the int flag used to denote the appropriate + * subdetector in relation to a readout timestamp. + * @return Returns the timestamp flag as an int. + */ + protected abstract int getTimestampFlag(); + + /** + * Generate photoelectron/amplification noise for a pulse's amplitude. + * @param hit - The hit for which to generate a fluctuation. + * @return Returns a fluctuation in units GeV. 
+ */ + protected double getAmplitudeFluctuation(CalorimeterHit hit) { + double sigma = Math.sqrt(hit.getRawEnergy() * EcalUtils.MeV / pePerMeV); + return RandomGaussian.getGaussian(0, sigma); + } + + @Override + protected Collection> getOnTriggerData(double triggerTime) { + // Create a list to store the extra collections. + List> collectionsList = null; + if(writeTruth) { + collectionsList = new ArrayList>(5); + } else { + collectionsList = new ArrayList>(2); + } + + // Readout drivers need to produce readout timestamps to + // specify when they occurred in terms of simulation time. + // The readout timestamp for the subdetector data should be + // defined as the start simulation time of the ADC buffer. + ReadoutTimestamp timestamp = new ReadoutTimestamp(getTimestampFlag(), triggerTime - (readoutOffset * 4) + 4); + + // Make the readout timestamp collection parameters object. + LCIOCollectionFactory.setCollectionName(ReadoutTimestamp.collectionName); + LCIOCollection timestampCollection = LCIOCollectionFactory.produceLCIOCollection(ReadoutTimestamp.class); + TriggeredLCIOData timestampData = new TriggeredLCIOData(timestampCollection); + timestampData.getData().add(timestamp); + collectionsList.add(timestampData); + + // Instantiate some lists to store truth data, if truth is to + // be output. + List triggerTruthHits = null; + List triggerTruthRelations = null; + if(writeTruth) { + triggerTruthHits = new ArrayList(); + triggerTruthRelations = new ArrayList(); + } + + // Get the appropriate collection of readout hits and output + // them to the readout data manager. 
+ if(debug_)System.out.println(this.getClass().getName()+":: getting mode = "+mode+" hits on trigger time = "+triggerTime); + if(mode == 7) { + List readoutHits = getMode7Hits(triggerTime); + TriggeredLCIOData readoutData = new TriggeredLCIOData(mode7HitCollectionParams); + readoutData.getData().addAll(readoutHits); + collectionsList.add(readoutData); + } else { + List readoutHits = null; + if(mode == 1) { readoutHits = getMode1Hits(triggerTime); } + else { readoutHits = getMode3Hits(triggerTime); } + TriggeredLCIOData readoutData = new TriggeredLCIOData(mode13HitCollectionParams); + readoutData.getData().addAll(readoutHits); + collectionsList.add(readoutData); + + // FIXME: Truth information is currently only supported for Mode-1 operation. + if(writeTruth && mode == 1) { + for(RawTrackerHit hit : readoutHits) { + Collection truthHits = getTriggerTruthValues(hit.getCellID(), triggerTime); + triggerTruthHits.addAll(truthHits); + for(CalorimeterHit truthHit : truthHits) { + triggerTruthRelations.add(new BaseLCRelation(hit, truthHit)); + } + } + } + } + + // Add the truth collections if they exist. + if(writeTruth) { + // Add the truth hits to the output collection. + LCIOCollection truthHitCollection = ReadoutDataManager.getCollectionParameters(truthHitCollectionName, SimCalorimeterHit.class); + TriggeredLCIOData truthData = new TriggeredLCIOData(truthHitCollection); + truthData.getData().addAll(triggerTruthHits); + collectionsList.add(truthData); + + // MC particles need to be extracted from the truth hits + // and included in the readout data to ensure that the + // full truth chain is available. 
+ Set truthParticles = new java.util.HashSet(); + for(SimCalorimeterHit simHit : triggerTruthHits) { + for(int i = 0; i < simHit.getMCParticleCount(); i++) { + List rootParticles = getRootParticleList(simHit.getMCParticle(i)); + for(MCParticle rootParticle : rootParticles) { + truthParticles.addAll(getParticleTreeAsSet(rootParticle)); + } + } + } + + // Create the truth MC particle collection. + LCIOCollection truthParticleCollection = ReadoutDataManager.getCollectionParameters("MCParticle", MCParticle.class); + TriggeredLCIOData truthParticleData = new TriggeredLCIOData(truthParticleCollection); + truthParticleData.getData().addAll(truthParticles); + collectionsList.add(truthParticleData); + + // Add the truth relations to the output data. + TriggeredLCIOData truthRelations = new TriggeredLCIOData(truthRelationsCollectionParams); + truthRelations.getData().addAll(triggerTruthRelations); + collectionsList.add(truthRelations); + } + + // Return the extra trigger collections. + return collectionsList; + } + + /** + * Gets the pedestal for the indicated subdetector channel. + * @param channelID - The channel ID. + * @return Returns the value of the pedestal in units of ADC as a + * double. + */ + protected abstract double getPedestalConditions(long channelID); + + + @Override + protected boolean isPersistent() { + throw new UnsupportedOperationException(); + } + + @Override + protected double getReadoutWindowAfter() { + throw new UnsupportedOperationException(); + } + + @Override + protected double getReadoutWindowBefore() { + throw new UnsupportedOperationException(); + } + + @Override + protected double getTimeDisplacement() { + return localTimeOffset; + } + + @Override + protected double getTimeNeededForLocalOutput() { + return (readoutWindow - readoutOffset) * 4.0; + } + + /** + * Gets the time shift for the indicated subdetector channel. + * @param channelID - The channel ID. + * @return Returns the value of the time shift in units of ns as + * a double. 
+ */ + protected abstract double getTimeShiftConditions(long channelID); + + /** + * Gets the subdetector geometry object. + * @return Returns the subdetector geometry object. This will be + * an object of parameterized type D, which will is + * a subclass of {@link org.lcsim.geometry.compact.Subdetector + * Subdetector}. + */ + protected D getSubdetector() { + return geometry; + } + + /** + * Clones an object of type {@link org.lcsim.event.CalorimeterHit + * CalorimeterHit} and returns a copy that is shifted in time by + * the specified amount. + * @param hit - The hit to clone. + * @param newTime - The new time for the hit. + * @return Returns a time-shifted hit as an object of type {@link + * org.lcsim.event.CalorimeterHit CalorimeterHit}, unless the + * input hit was a {@link org.lcsim.event.SimCalorimeterHit + * SimCalorimeterHit} object, in which case the truth information + * will be retained. + */ + private static final CalorimeterHit cloneHitToTime(CalorimeterHit hit, double newTime) { + if(hit instanceof SimCalorimeterHit) { + // Cast the hit to a simulated calorimeter hit. + SimCalorimeterHit simHit = (SimCalorimeterHit) hit; + + // Create the necessary data objects to clone the + // hit. + int[] pdgs = new int[simHit.getMCParticleCount()]; + float[] times = new float[simHit.getMCParticleCount()]; + float[] energies = new float[simHit.getMCParticleCount()]; + Object[] particles = new Object[simHit.getMCParticleCount()]; + for(int i = 0; i < simHit.getMCParticleCount(); i++) { + particles[i] = simHit.getMCParticle(i); + pdgs[i] = simHit.getMCParticle(i).getPDGID(); + + // Note -- Despite returning the value for these + // methods as a double, they are actually stored + // internally as floats, so this case is always safe. + // Note -- Hit times are calculated based on the time + // of each of the contributing truth particles. This + // means that we have to give a fake truth time to + // actually get the correct hit time. 
+ times[i] = (float) newTime; + energies[i] = (float) simHit.getContributedEnergy(i); + } + + // Create the new hit and shift its time position. + BaseSimCalorimeterHit cloneHit = new BaseSimCalorimeterHit(simHit.getCellID(), simHit.getRawEnergy(), newTime, + particles, energies, times, pdgs, simHit.getMetaData()); + + // Return the hit. + return cloneHit; + } else { + return new BaseCalorimeterHit(hit.getRawEnergy(), hit.getCorrectedEnergy(), hit.getEnergyError(), newTime, + hit.getCellID(), hit.getPositionVec(), hit.getType(), hit.getMetaData()); + } + } + + /** + * Gets the value of the pulse-shape Guassian function for the + * given parameters. + * @param t + * @param sig + * @return Returns the value of the function as a + * double. + */ + private static final double funcGaus(double t, double sig) { + return Math.exp(-t * t / (2 * sig * sig)); + } + + /** + * Generates the hits which should be output for a given trigger + * time in Mode-1 format. + * @param triggerTime - The trigger time. + * @return Returns the readout hits for the given trigger time as + * Mode-1 hits. + */ + private List getMode1Hits(double triggerTime) { + // Create a list to store the Mode-1 hits. + List hits = new ArrayList(); + if(debug_)System.out.println(this.getClass().getName()+":: getting mode1Hits for trigger time = "+triggerTime+" and readout window = "+readoutWindow); + // Iterate over each channel. + for(Long cellID : adcBufferMap.keySet()) { + // Get the ADC values at the time of the trigger. + short[] adcValues = getTriggerADCValues(cellID, triggerTime); + if(debug_){ + Collection simHits=getTriggerTruthValues(cellID, triggerTime); + if(simHits.size()==0) + System.out.println(this.getClass().getName()+":: no sim cal hits in this channel"); + else{ + for( SimCalorimeterHit hit: simHits) + System.out.println(this.getClass().getName()+":: sim cal hit in this channel with energy = "+hit.getRawEnergy()); + } + } + + // Iterate across the ADC values. 
If the ADC value is + // sufficiently high to produce a hit, then it should be + // written out. + boolean isAboveThreshold = false; + for(int i = 0; i < adcValues.length; i++) { + // Check that there is a threshold-crossing at some + // point in the ADC buffer. + if(adcValues[i] > getPedestalConditions(cellID) + integrationThreshold) { + if(debug_)System.out.println(this.getClass().getName()+":: found an adc value above threshold for cellID = "+cellID); + isAboveThreshold = true; + break; + } + } + + // If so, create a new hit and add it to the list. + if(isAboveThreshold) { + hits.add(new BaseRawTrackerHit(cellID, 0, adcValues)); + } + } + + // Return the hits. + return hits; + } + + /** + * Generates the hits which should be output for a given trigger + * time in Mode-3 format. + * @param triggerTime - The trigger time. + * @return Returns the readout hits for the given trigger time as + * Mode-3 hits. + */ + private List getMode3Hits(double triggerTime) { + // Create a list to store the Mode-3 hits. + List hits = new ArrayList(); + + // Iterate across the ADC values and extract Mode-3 hits. 
+ for(Long cellID : adcBufferMap.keySet()) { + int pointerOffset = 0; + int numSamplesToRead = 0; + int thresholdCrossing = 0; + short[] adcValues = null; + short[] window = getTriggerADCValues(cellID, triggerTime); + + for(int i = 0; i < ReadoutDataManager.getReadoutWindow(); i++) { + if(numSamplesToRead != 0) { + adcValues[adcValues.length - numSamplesToRead] = window[i - pointerOffset]; + numSamplesToRead--; + if (numSamplesToRead == 0) { + hits.add(new BaseRawTrackerHit(cellID, thresholdCrossing, adcValues)); + } + } else if ((i == 0 || window[i - 1] <= getPedestalConditions(cellID) + integrationThreshold) && window[i] + > getPedestalConditions(cellID) + integrationThreshold) { + thresholdCrossing = i; + pointerOffset = Math.min(numSamplesBefore, i); + numSamplesToRead = pointerOffset + Math.min(numSamplesAfter, ReadoutDataManager.getReadoutWindow() - i - pointerOffset - 1); + adcValues = new short[numSamplesToRead]; + } + } + } + + // Return the hits. + return hits; + } + + /** + * Generates the hits which should be output for a given trigger + * time in Mode-7 format. + * @param triggerTime - The trigger time. + * @return Returns the readout hits for the given trigger time as + * Mode-7 hits. + */ + private List getMode7Hits(double triggerTime) { + // Create a list to store the Mode-7 hits. + List hits = new ArrayList(); + + // Iterate across the ADC values and extract Mode-7 hits. + for(Long cellID : adcBufferMap.keySet()) { + int adcSum = 0; + int pointerOffset = 0; + int numSamplesToRead = 0; + int thresholdCrossing = 0; + short[] window = getTriggerADCValues(cellID, triggerTime); + + // Generate Mode-7 hits. 
+ if(window != null) { + for(int i = 0; i < ReadoutDataManager.getReadoutWindow(); i++) { + if (numSamplesToRead != 0) { + adcSum += window[i - pointerOffset]; + numSamplesToRead--; + if(numSamplesToRead == 0) { + hits.add(new BaseRawCalorimeterHit(cellID, adcSum, 64 * thresholdCrossing)); + } + } else if((i == 0 || window[i - 1] <= getPedestalConditions(cellID) + integrationThreshold) + && window[i] > getPedestalConditions(cellID) + integrationThreshold) { + thresholdCrossing = i; + pointerOffset = Math.min(numSamplesBefore, i); + numSamplesToRead = pointerOffset + Math.min(numSamplesAfter, ReadoutDataManager.getReadoutWindow() - i - pointerOffset - 1); + adcSum = 0; + } + } + } + } + + // Return the hits. + return hits; + } + + private int getReadoutLatency(double triggerTime) { + return ((int) ((ReadoutDataManager.getCurrentTime() - triggerTime) / 4.0)) + readoutOffset; + } + + /** + * Gets the ADC values for the trigger readout window for the + * requested cell ID and returns them as a short + * primitive array. + * @param cellID - The ID for the channel of the requested ADC + * value array. + * @param triggerTime - The time of the trigger to be written. + * @return Returns the ADC values in a time range equal to the + * readout window positioned around the trigger time as array of + * short primitives. + */ + private short[] getTriggerADCValues(long cellID, double triggerTime) { + // Calculate the offset between the current position and the + // trigger time. + int readoutLatency = getReadoutLatency(triggerTime); + + // Get the ADC pipeline. + IntegerRingBuffer pipeline = adcBufferMap.get(cellID); + + // Extract the ADC values for the requested channel. 
+ short[] adcValues = new short[readoutWindow]; + if(debug_)System.out.println(this.getClass().getName()+":: getTriggerADCValues:: latency = "+readoutLatency); + for(int i = 0; i < readoutWindow; i++) { + adcValues[i] = (short) pipeline.getValue(-(readoutLatency - i - 1)).intValue(); + if(debug_) + System.out.println(this.getClass().getName()+":: getTriggerADCValues:: "+" pipeline index = "+ (-(readoutLatency - i - 1)) + +" adcValue["+i+"] = "+adcValues[i]); + } + + // Return the result. + return adcValues; + } + + /** + * Gets a list of all truth hits that occurred in the ADC output + * window around a given trigger time from the truth buffer. + * @param cellID - The channel ID. + * @param triggerTime - The trigger time. + * @return Returns all truth hits that occurred within the ADC + * readout window around the trigger time for the specified + * channel. + */ + private Collection getTriggerTruthValues(long cellID, double triggerTime) { + // Calculate the offset between the current position and the + // trigger time. + int readoutLatency = getReadoutLatency(triggerTime); + + // Get the truth pipeline. + ObjectRingBuffer pipeline = truthBufferMap.get(cellID); + + // Extract the truth for the requested channel. Note that one + // extra sample is included over the range of ADC samples as + // sometimes, the truth hit occurs a little earlier than may + // be expected due to a delay from pulse propagation. + double baseHitTime = 0; + List channelHits = new ArrayList(); + for(int i = 0; i < readoutWindow + 4; i++) { + // Hit times should be specified with respect to the + // start of the readout window. + for(SimCalorimeterHit hit : pipeline.getValue(-(readoutLatency - i))) { + channelHits.add((SimCalorimeterHit) cloneHitToTime(hit, baseHitTime)); + } + + // Increment the base hit time. + baseHitTime += 4.0; + } + + // Return the result. + return channelHits; + } + + /** + * Returns pulse amplitude at the given time (relative to hit time). Gain is + * applied. 
+ * + * @param time Units of ns. Relative to hit time (negative=before the start + * of the pulse). + * @param cellID Crystal ID as returned by hit.getCellID(). + * @return Amplitude, units of volts/GeV. + */ + private double pulseAmplitude(double time, long cellID) { + //normalization constant from cal gain (MeV/integral bit) to amplitude gain (amplitude bit/GeV) + // Determine the gain. Gain may either be fixed across all + // channels, or be obtained from the conditions database + // depending on the behavior defined in the steering file. + // The gain should also be normalized. + double gain; + if(fixedGain > 0) { + gain = READOUT_PERIOD / (fixedGain * EcalUtils.MeV * ((Math.pow(2, nBit) - 1) / maxVolt)); + } else { + gain = READOUT_PERIOD / (getGainConditions(cellID) * EcalUtils.MeV * ((Math.pow(2, nBit) - 1) / maxVolt)); + } + + // Calculate the correct pulse amplitude and return it. + return gain * pulseAmplitude(time, pulseShape, tp); + } + + /** + * Calculates the amplitude of a pulse at the given time, where + * the time is relative to the hit time, and for a given pulse + * shape. + * @param time - The time in the pulse. This is in units of ns + * and is relative to the hit time. A negative value represents + * the pulse shape before the hit occurs. + * @param shape - The type of pulse for which the calculation is + * to be performed. + * @param shapingTime - A fitting parameter that influences the + * shape of the pulse. + * @return Returns the pulse amplitude in units of inverse ns. + * The amplitude is normalized so that the pulse integral is one. + */ + private static final double pulseAmplitude(double time, PulseShape shape, double shapingTime) { + // There can not be a pulse response from a hit that has not + // occurred yet, so any time before zero must produce a pulse + // amplitude of zero as well. + if(time <= 0.0) { + return 0.0; + } + + // Perform the calculation appropriate to the specified pulse + // shape. 
+ switch (shape) { + case CRRC: + // Peak Location: tp + // Peak Value: 1/(tp * e) + return ((time / (shapingTime * shapingTime)) * Math.exp(-time / shapingTime)); + case DoubleGaussian: + // According to measurements, the output signal can + // be fitted by two Gaussians: one for the rise of + // the signal and one for the fall. + // Peak Location: 3 * riseTime + // Peak Value: 1/norm + double norm = ((riseTime + fallTime) / 2) * Math.sqrt(2 * Math.PI); //to ensure the total integral is equal to 1: = 33.8 + return funcGaus(time - 3 * riseTime, (time < 3 * riseTime) ? riseTime : fallTime) / norm; + case ThreePole: + // Peak Location: 2 * tp + // Peak Value: 2/(tp * e^2) + return ((time * time / (2 * shapingTime * shapingTime * shapingTime)) * Math.exp(-time / shapingTime)); + default: + return 0.0; + } + } + + /** + * Gets the local time for this driver. + * @return Returns the local time for this driver. + */ + private double readoutTime() { + return readoutCounter * READOUT_PERIOD; + //return ReadoutDataManager.getCurrentTime(); + } + + /** + * Resets the driver buffers to their default values. + * @return Returns true if the buffers were reset + * successfully, and false if they were not. + */ + private void resetBuffers() { + // Reset each of the buffer maps. + adcBufferMap.clear(); + truthBufferMap.clear(); + voltageBufferMap.clear(); + + // Get the set of all possible channel IDs. + Set cells = getChannelIDs(); + + // Insert a new buffer for each channel ID. 
+ for(Long cellID : cells) { + voltageBufferMap.put(cellID, new DoubleRingBuffer(BUFFER_LENGTH)); + truthBufferMap.put(cellID, new ObjectRingBuffer(PIPELINE_LENGTH)); + adcBufferMap.put(cellID, new IntegerRingBuffer(PIPELINE_LENGTH, (int) Math.round(getPedestalConditions(cellID)))); + + truthBufferMap.get(cellID).stepForward(); + + flagStartNewIntegration.put(cellID, false); + } + } + + /** + * Sets whether randomized noise should be added to SLIC truth + * energy depositions when simulating subdetector hits. This is + * true by default. + * @param state - true means that noise will be + * added and false that it will not. + */ + public void setAddNoise(boolean state) { + addNoise = state; + } + + /** + * Defines the name of the subdetector geometry object. + * @param ecalName - The subdetector name. + */ + public void setGeometryName(String value) { + geometryName = value; + } + + /** + * Sets a single uniform value for the gain on all channels. This + * will override the conditions database value. If set negative, + * the conditions database values will be used instead. Gains are + * defined in units of MeV/ADC. This defaults to -1. + * @param value - The uniform gain to be employed across all + * channels in units of MeV/ADC. A negative value indicates to + * use the conditions database values. + */ + public void setFixedGain(double value) { + fixedGain = value; + } + + /** + * Sets the threshold that a pulse sample must exceed before + * pulse integration may commence. Units are in ADC and the + * default value is 12 ADC. + * @param value - The pulse integration threshold, in units of + * ADC. + */ + public void setIntegrationThreshold(int value) { + integrationThreshold = value; + } + + /** + * Sets the name of the input truth hit collection name. + * @param collection - The collection name. 
+ */ + public void setInputHitCollectionName(String collection) { + truthHitCollectionName = collection; + } + + /** + * Sets the name of the input pulser data collection name. + * @param collection - The collection name. + */ + public void setInputPulserDataCollectionName(String collection) { + PulserDataCollectionName = collection; + } + + /** + * Sets the operational mode of the simulation. This affects the + * form of the readout hit output. Mode may be set to the values + * 1, 3, or 7. + * @param value - The operational mode. + */ + public void setMode(int value) { + mode = value; + } + + /** + * Defines the number of samples from after a threshold-crossing + * pulse sample that should be included in the pulse integral. + * Units are in clock-cycles (4 ns samples) and the default value + * is 20 samples. + * @param value - The number of samples. + */ + public void setNumberSamplesAfter(int value) { + numSamplesAfter = value; + } + + /** + * Defines the number of samples from before a threshold-crossing + * pulse sample that should be included in the pulse integral. + * Units are in clock-cycles (4 ns samples) and the default value + * is 5 samples. + * @param value - The number of samples. + */ + public void setNumberSamplesBefore(int value) { + numSamplesBefore = value; + } + + /** + * Sets the name of the hits produced by this driver for use in + * the trigger simulation.

+ * Note that this is not the name of the collection output when a + * trigger occurs. For this value, see the method {@link + * org.hps.readout.ecal.updated.DigitizationReadoutDriver#setReadoutHitCollectionName(String) + * setReadoutHitCollectionName(String)} instead. + * @param collection - The collection name. + */ + public void setOutputHitCollectionName(String collection) { + outputHitCollectionName = collection; + } + + @Override + public void setPersistent(boolean state) { + throw new UnsupportedOperationException(); + } + + /** + * Sets the number of photoelectrons per MeV of deposited energy. + * This value is used in the simulation of subdetector hit noise + * due to photoelectron statistics. + * @param value - The number of photoelectrons per MeV. + */ + public void setPhotoelectronsPerMeV(double value) { + pePerMeV = value; + } + + /** + * Sets the pulse-shape model used to simulate pre-amplifier + * pulses. The default value is ThreePole. + * @param pulseShape - The name of the pulse shape model that is + * to be employed. Valid options are ThreePole, + * DoubleGaussian, or CRRC. + */ + public void setPulseShape(String pulseShape) { + this.pulseShape = PulseShape.valueOf(pulseShape); + } + + /** + * Sets the shaper time parameter for pulse simulation. The value + * depends on the pulse shape selected. For the default pulse + * shape ThreePole, it is equal to the RC, or half + * the peaking time (9.6 ns). + * @param value The pulse time parameter in units of nanoseconds. + */ + public void setPulseTimeParameter(double value) { + tp = value; + } + + /** + * Sets the name of the triggered hit output collection. This + * collection will hold all hits produced when a trigger occurs. + *

+ * Note that this collection is different from the hits produced + * for internal usage by the readout simulation. For this value, + * see the method {@link + * org.hps.readout.ecal.updated.DigitizationReadoutDriver#setOutputHitCollectionName(String) + * setOutputHitCollectionName(String)} instead. + * @param collection - The collection name. + */ + public void setReadoutHitCollectionName(String collection) { + readoutCollectionName = collection; + } + + /** + * Sets the number of samples by which readout hit pulse-crossing + * samples should be offset. Units are in clock-cycles (intervals + * of 4 ns). + * @param value - The offset of the pulse-crossing sample in + * units of clock-cycles (4 ns intervals). + */ + public void setReadoutOffset(int value) { + readoutOffset = value; + } + + /** + * Sets time window of ADC samples in pulser data. + * Units are in clock-cycles (intervals of 4 ns). + * @param value - The time window of ADC samples in pulser data in + * units of clock-cycles (4 ns intervals). + */ + public void setPulserDataWindow(int value) { + pulserDataWindow = value; + } + + /** + * Sets sample shift between Ecal and hodoscope detectors. + * The shift is equal to (Hodo_readout_offset - Ecal_readout_offset) / 4. + * @param value - The shift of ADC samples in pulser data in + * units of clock-cycles (4 ns intervals). + */ + public void setPulserSamplesShift(int value) { + pulserSamplesShift = value; + } + + /** + * Sets the size of the readout window, in units of 4 ns samples. + * @param value - The readout window. 
+ */ + public void setReadoutWindow(int value) { + readoutWindow = value; + } + + @Override + public void setReadoutWindowAfter(double value) { + throw new UnsupportedOperationException(); + } + + @Override + public void setReadoutWindowBefore(double value) { + throw new UnsupportedOperationException(); + } + + /** + * Sets the name of the collection which contains the relations + * between truth hits from SLIC and the calorimeter hit output. + * This is specifically for the trigger path hits. + * @param collection - The collection name. + */ + public void setTriggerPathTruthRelationsCollectionName(String collection) { + triggerTruthRelationsCollectionName = collection; + } + + /** + * Sets the name of the collection which contains the relations + * between truth hits from SLIC and the calorimeter hit output. + * This is specifically for the readout path hits. + * @param collection - The collection name. + */ + public void setTruthRelationsCollectionName(String collection) { + truthRelationsCollectionName = collection; + } + + /** + * Sets whether subdetector truth data for trigger path hits is + * to be produced or not. + * @param state - true indicates that the truth data + * should be created, and false that it should not. + */ + public void setWriteTriggerPathTruth(boolean state) { + writeTriggerTruth = state; + } + + /** + * Sets whether subdetector truth data for readout path hits is + * to be written to the output LCIO file or not. + * @param state - true indicates that the truth data + * should be written, and false that it should not. + */ + public void setWriteTruth(boolean state) { + writeTruth = state; + } + + /** + * Enumerable PulseShape defines the allowed types + * of pulses that may be used to emulate the subdetector response + * to incident energy. 
+ * + * @author Sho Uemura + */ + public enum PulseShape { + CRRC, DoubleGaussian, ThreePole + } +} diff --git a/digi/src/main/java/org/hps/digi/nospacing/EcalDigiWithPulseNoSpacingReadoutDriver.java b/digi/src/main/java/org/hps/digi/nospacing/EcalDigiWithPulseNoSpacingReadoutDriver.java new file mode 100644 index 000000000..b1c505f17 --- /dev/null +++ b/digi/src/main/java/org/hps/digi/nospacing/EcalDigiWithPulseNoSpacingReadoutDriver.java @@ -0,0 +1,153 @@ +package org.hps.digi.nospacing; + +import java.awt.event.ActionEvent; +import java.awt.event.ActionListener; +import java.util.Set; + +import org.hps.readout.ReadoutDriver; +import org.hps.conditions.database.DatabaseConditionsManager; +import org.hps.conditions.ecal.EcalChannelConstants; +import org.hps.conditions.ecal.EcalConditions; +import org.hps.readout.ReadoutTimestamp; +import org.hps.recon.ecal.EcalUtils; +import org.lcsim.event.RawTrackerHit; +import org.lcsim.geometry.Detector; +import org.lcsim.geometry.subdetector.HPSEcal3; +import org.hps.record.daqconfig2019.ConfigurationManager2019; +import org.hps.record.daqconfig2019.DAQConfig2019; +import org.hps.record.daqconfig2019.FADCConfigEcal2019; +import org.hps.conditions.ecal.EcalChannel.EcalChannelCollection; + +/** + * Class EcalDigiWithPulseNoSpacingReadoutDriver is an implementation of the + * {@link org.hps.digi.nospacing.CalDigiWithPulserNoSpacingReadoutDriver} for a subdetector of type {@link + * org.lcsim.geometry.subdetector.HPSEcal3 HPSEcal3}. It handles all + * of the calorimeter-specific functions needed by the superclass. + * + * @author Tongtong Cao + */ +public class EcalDigiWithPulseNoSpacingReadoutDriver extends CalDigiWithPulserNoSpacingReadoutDriver { + // The DAQ configuration manager for FADC parameters. + private FADCConfigEcal2019 config = new FADCConfigEcal2019(); + private boolean configStat = false; // Indicates if DAQ configuration is loaded + + // The number of nanoseconds in a clock-cycle (sample). 
+ private static final int nsPerSample = 4; + + + /** Stores the conditions for this subdetector. */ + private EcalConditions ecalConditions = null; + + /** Stores the channel collection for this subdetector. */ + private EcalChannelCollection geoMap = new EcalChannelCollection(); + + public EcalDigiWithPulseNoSpacingReadoutDriver() { + // Set the default values for each subdetector-dependent + // parameter. + setGeometryName("Ecal"); + + setInputHitCollectionName("EcalHits"); + setOutputHitCollectionName("EcalRawHits"); + setTruthRelationsCollectionName("EcalTruthRelations"); + setTriggerPathTruthRelationsCollectionName("TriggerPathTruthRelations"); + setReadoutHitCollectionName("EcalReadoutHits"); + + setPhotoelectronsPerMeV(EcalUtils.photoelectronsPerMeV); + setPulseTimeParameter(9.6); + } + + /** + * Sets whether or not the DAQ configuration is applied into the driver + * the EvIO data stream or whether to read the configuration from data files. + * + * @param state - true indicates that the DAQ configuration is + * applied into the readout system, and false that it + * is not applied into the readout system. + */ + public void setDaqConfigurationAppliedintoReadout(boolean state) { + // If the DAQ configuration should be read, attach a listener + // to track when it updates. + if (state) { + ConfigurationManager2019.addActionListener(new ActionListener() { + @Override + public void actionPerformed(ActionEvent e) { + // Get the DAQ configuration. + DAQConfig2019 daq = ConfigurationManager2019.getInstance(); + + // Load the DAQ settings from the configuration manager. + numSamplesAfter = daq.getEcalFADCConfig().getNSA() / nsPerSample; + numSamplesBefore = daq.getEcalFADCConfig().getNSB() / nsPerSample; + readoutWindow = daq.getEcalFADCConfig().getWindowWidth() / nsPerSample; + pulserDataWindow = readoutWindow; + + // Get the FADC configuration. 
+ config = daq.getEcalFADCConfig(); + configStat = true; + } + }); + } + } + + + @Override + public void detectorChanged(Detector detector) { + // Get a copy of the calorimeter conditions for the detector. + ecalConditions = DatabaseConditionsManager.getInstance().getEcalConditions(); + + // Store the calorimeter conditions table for converting between + // geometric IDs and channel objects. + geoMap = DatabaseConditionsManager.getInstance().getCachedConditions(EcalChannelCollection.class, "ecal_channels").getCachedData(); + + // Run the superclass method. + super.detectorChanged(detector); + } + + @Override + protected Set getChannelIDs() { + return getSubdetector().getNeighborMap().keySet(); + } + + @Override + protected Long getID(RawTrackerHit hit) { + return hit.getCellID(); + } + + @Override + protected double getGainConditions(long cellID) { + return findChannel(cellID).getGain().getGain(); + } + + @Override + protected double getNoiseConditions(long channelID) { + return findChannel(channelID).getCalibration().getNoise(); + } + + protected double getPedestalConditions(long cellID) { + return findChannel(cellID).getCalibration().getPedestal(); + + } + + @Override + protected double getTimeShiftConditions(long cellID) { + return findChannel(cellID).getTimeShift().getTimeShift(); + } + + @Override + protected int getTimestampFlag() { + return ReadoutTimestamp.SYSTEM_ECAL; + } + + /** + * Gets the channel parameters for a given channel ID. + * @param cellID - The long ID value that represents + * the channel. This is typically acquired from the method {@link + * org.lcsim.event.CalorimeterHit#getCellID() getCellID()} in a + * {@link org.lcsim.event.CalorimeterHit CalorimeterHit} object. + * @return Returns the channel parameters for the channel as an + * {@link org.hps.conditions.ecal.EcalChannelConstants + * EcalChannelConstants} object. 
+ */ + private EcalChannelConstants findChannel(long cellID) { + return ecalConditions.getChannelConstants(ecalConditions.getChannelCollection().findGeometric(cellID)); + } +} diff --git a/digi/src/main/java/org/hps/digi/nospacing/EcalRawConverterNoSpacingReadoutDriver.java b/digi/src/main/java/org/hps/digi/nospacing/EcalRawConverterNoSpacingReadoutDriver.java new file mode 100755 index 000000000..670e827a7 --- /dev/null +++ b/digi/src/main/java/org/hps/digi/nospacing/EcalRawConverterNoSpacingReadoutDriver.java @@ -0,0 +1,153 @@ +package org.hps.digi.nospacing; + +import java.awt.event.ActionEvent; +import java.awt.event.ActionListener; + +import org.hps.readout.ReadoutDriver; +import org.hps.conditions.database.DatabaseConditionsManager; +import org.hps.conditions.ecal.EcalChannelConstants; +import org.hps.conditions.ecal.EcalConditions; +//import org.hps.readout.RawConverterNoSpacingReadoutDriver; +import org.hps.readout.rawconverter.AbstractMode3RawConverter; +import org.hps.readout.rawconverter.EcalReadoutMode3RawConverter; +import org.hps.record.daqconfig2019.ConfigurationManager2019; +import org.hps.record.daqconfig2019.DAQConfig2019; +import org.hps.record.daqconfig.ConfigurationManager; +import org.hps.record.daqconfig.DAQConfig; +import org.lcsim.geometry.Detector; +import org.lcsim.geometry.subdetector.HPSEcal3; + +/** + * EcalRawConverterNoSpacingReadoutDriver is an implementation of + * {@link org.hps.readout.RawConverterReadoutDriver + * RawConverterReadoutDriver} for the calorimeter subdetector. + * + * @see org.hps.readout.RawConverterReadoutDriver + */ +public class EcalRawConverterNoSpacingReadoutDriver extends RawConverterNoSpacingReadoutDriver { + /** + * The converter object responsible for processing raw hits into + * proper {@link org.lcsim.event.CalorimeterHit CalorimeterHit} + * objects. + */ + private EcalReadoutMode3RawConverter converter = new EcalReadoutMode3RawConverter(); + + /** + * Cached copy of the calorimeter conditions. 
All calorimeter + * conditions should be called from here, rather than by directly + * accessing the database manager. + */ + private EcalConditions ecalConditions = null; + + /** + * Instantiates the driver with the correct default parameters. + */ + public EcalRawConverterNoSpacingReadoutDriver() { + super("EcalRawHits", "EcalCorrectedHits"); + setSkipBadChannels(true); + } + + /** + * Sets whether or not the DAQ configuration is applied into the driver + * the EvIO data stream or whether to read the configuration from data files. + * + * @param state - true indicates that the DAQ configuration is + * applied into the readout system, and false that it + * is not applied into the readout system. + */ + public void setDaqConfiguration2016AppliedintoReadout(boolean state) { + // Track changes in the DAQ configuration. + if (state) { + ConfigurationManager.addActionListener(new ActionListener() { + @Override + public void actionPerformed(ActionEvent e) { + // Get the DAQ configuration. + DAQConfig daq = ConfigurationManager.getInstance(); + + // Load the DAQ settings from the configuration manager. + getConverter().setNumberSamplesAfter(daq.getFADCConfig().getNSA()); + getConverter().setNumberSamplesBefore(daq.getFADCConfig().getNSB()); + + // Get the FADC configuration. + getConverter().setFADCConfig2016(daq.getFADCConfig()); + } + }); + } + } + + /** + * Sets whether or not the DAQ configuration is applied into the driver + * the EvIO data stream or whether to read the configuration from data files. + * + * @param state - true indicates that the DAQ configuration is + * applied into the readout system, and false that it + * is not applied into the readout system. + */ + public void setDaqConfigurationAppliedintoReadout(boolean state) { + // Track changes in the DAQ configuration. + if (state) { + ConfigurationManager2019.addActionListener(new ActionListener() { + @Override + public void actionPerformed(ActionEvent e) { + // Get the DAQ configuration. 
+ DAQConfig2019 daq = ConfigurationManager2019.getInstance(); + + // Load the DAQ settings from the configuration manager. + getConverter().setNumberSamplesAfter(daq.getEcalFADCConfig().getNSA()); + getConverter().setNumberSamplesBefore(daq.getEcalFADCConfig().getNSB()); + + // Get the FADC configuration. + getConverter().setFADCConfigEcal2019(daq.getEcalFADCConfig()); + } + }); + } + } + + /** + * Indicates whether or not data from channels flagged as "bad" + * in the conditions system should be ignored. true + * indicates that they should be ignored, and false + * that they should not. + * @param apply - true indicates that "bad" channels + * will be ignored and false that they will not. + */ + @Override + public void setSkipBadChannels(boolean state) { + super.skipBadChannels = state; + } + + @Override + protected AbstractMode3RawConverter getConverter() { + return converter; + } + + @Override + protected String getSubdetectorReadoutName(Detector detector) { + HPSEcal3 calorimeterGeometry = (HPSEcal3) detector.getSubdetector("Ecal"); + return calorimeterGeometry.getReadout().getName(); + } + + @Override + protected boolean isBadChannel(long channelID) { + return findChannel(channelID).isBadChannel(); + } + + @Override + protected void updateDetectorDependentParameters(Detector detector) { + ecalConditions = DatabaseConditionsManager.getInstance().getEcalConditions(); + } + + /** + * Gets the channel parameters for a given channel ID. + * @param cellID - The long ID value that represents + * the channel. This is typically acquired from the method {@link + * org.lcsim.event.CalorimeterHit#getCellID() getCellID()} in a + * {@link org.lcsim.event.CalorimeterHit CalorimeterHit} object. + * @return Returns the channel parameters for the channel as an + * {@link org.hps.conditions.ecal.EcalChannelConstants + * EcalChannelConstants} object. 
+ */ + private EcalChannelConstants findChannel(long cellID) { + return ecalConditions.getChannelConstants(ecalConditions.getChannelCollection().findGeometric(cellID)); + } +} diff --git a/digi/src/main/java/org/hps/digi/nospacing/EmptyEventsDriver.java b/digi/src/main/java/org/hps/digi/nospacing/EmptyEventsDriver.java new file mode 100644 index 000000000..5f99e4ee6 --- /dev/null +++ b/digi/src/main/java/org/hps/digi/nospacing/EmptyEventsDriver.java @@ -0,0 +1,147 @@ +package org.hps.digi.nospacing; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.Map; +import java.util.List; + +import org.lcsim.event.EventHeader; +import org.lcsim.event.base.BaseLCSimEvent; +import org.lcsim.event.EventHeader.LCMetaData; +import org.lcsim.geometry.Detector; +import org.lcsim.conditions.ConditionsManager; +import org.lcsim.util.Driver; +import org.lcsim.event.EventHeader; + +import org.lcsim.event.SimCalorimeterHit; +import org.lcsim.event.SimTrackerHit; +import org.lcsim.event.MCParticle; + +/* + * This driver will create an empty lcsim event + * and call super.process() so that all of the registered + * drivers run over this empty event. + * + * + */ + + +public class EmptyEventsDriver extends Driver{ + + private int nEmptyToInsert=250; //number of events to insert between real MC events + private int emptyCount=0; //counter + //make collections for all needed by readout sim + EventHeader emptyEvent; + boolean gotFirstRealEvent=false; + //names of collections + Map baseCollectionMap=new HashMap>(); + + @Override + public void detectorChanged(Detector det) { + + // in here, make empty collections. + // since these are members and don't change + // should be able just keep adding some one + // to empty "event"....hopefully this speeds + // things up a lot. 
+ + System.out.println("EmptyEventsDriver:: Setting up base map"); + + baseCollectionMap.put("EcalHits",SimCalorimeterHit.class); + baseCollectionMap.put("HodoscopeHits",SimTrackerHit.class); + baseCollectionMap.put("MCParticle",MCParticle.class); + baseCollectionMap.put("TrackerHits",SimTrackerHit.class); + baseCollectionMap.put("TrackerHitsECal",SimTrackerHit.class); + + + } + + + @Override + public void process(EventHeader event) { + // System.out.println("EmptyEventsDriver:: processing event!"); + + if(!gotFirstRealEvent){ + System.out.println("EmptyEventsDriver:: Making the empty bunch"); + //make an empty lcsim event based on this, real event + emptyEvent=makeEmptyMCEvent(event); + gotFirstRealEvent=true; + } + + // check if we should add empty or continue + + if(emptyCount mcCollections = new ArrayList(mcEvent.getMetaData()); + for (LCMetaData mcCollectionMeta : mcCollections) { + String mcCollectionName = mcCollectionMeta.getName(); + // check to see if this collection is in the base map + // if so, copy collection, clear it, and put it in new event. 
+ if (baseCollectionMap.containsKey(mcCollectionName)){ + List collection =new ArrayList<> ((List) mcEvent.get(mcCollectionName)); + collection.clear(); //remove element + System.out.println("EmptyEventsDriver:: inserting collection "+mcCollectionName); + + this.putCollection(mcCollectionMeta, collection, lcsimEvent); + } + } + System.out.println("EmptyEventsDriver::returning empty event"); + return lcsimEvent; + } + + + + protected void putCollection(LCMetaData collection, List entries, EventHeader event) { + String[] readout = collection.getStringParameters().get("READOUT_NAME"); + if (readout != null) { + event.put(collection.getName(), entries, collection.getType(), collection.getFlags(), readout[0]); + } else { + event.put(collection.getName(), entries, collection.getType(), collection.getFlags()); + } + if (this.getHistogramLevel() > HLEVEL_NORMAL) + System.out.println("Putting collection " + collection.getName() + " into event."); + } + + private void clearEvent(EventHeader event){ + List evtCollections = new ArrayList(event.getMetaData()); + for (LCMetaData evtCollectionMeta : evtCollections) { + String colName=evtCollectionMeta.getName(); + List col=(List)event.get(colName); + if(col.size()>0){ + System.out.println("clearing collection "+colName+" of size = "+col.size()); + ((List)event.get(colName)).clear(); + System.out.println(".....new size = "+col.size()); + } + } + } + +} diff --git a/digi/src/main/java/org/hps/digi/nospacing/EmptyEventsReadoutDriver.java b/digi/src/main/java/org/hps/digi/nospacing/EmptyEventsReadoutDriver.java new file mode 100644 index 000000000..6267057dd --- /dev/null +++ b/digi/src/main/java/org/hps/digi/nospacing/EmptyEventsReadoutDriver.java @@ -0,0 +1,223 @@ +package org.hps.digi.nospacing; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashMap; +import java.util.Map; +import java.util.List; + +import org.lcsim.event.EventHeader; +import org.lcsim.event.base.BaseLCSimEvent; +import 
org.lcsim.event.EventHeader.LCMetaData; +import org.lcsim.geometry.Detector; +import org.lcsim.conditions.ConditionsManager; +import org.lcsim.util.Driver; +import org.lcsim.event.EventHeader; + +import org.lcsim.event.SimCalorimeterHit; +import org.lcsim.event.SimTrackerHit; +import org.lcsim.event.MCParticle; + +import org.hps.readout.ReadoutDataManager; +import org.hps.readout.ReadoutDriver; +import org.hps.readout.ReadoutTimestamp; + + +/* + * This driver will create an empty lcsim event + * and call super.process() so that all of the registered + * drivers run over this empty event. + * + * + */ + + +public class EmptyEventsReadoutDriver extends ReadoutDriver{ + + private int nEmptyToInsert=250; //number of events to insert between real MC events + private int emptyCount=0; //counter + //make collections for all needed by readout sim + EventHeader emptyEvent; + boolean gotFirstRealEvent=false; + //names of collections + Map baseCollectionMap=new HashMap>(); + + List baseCollectionNames=Arrays.asList("EcalHits","HodoscopeHits","MCParticle","TrackerHits","TrackerHitsECal"); + List mcCollections = null; + @Override + public void detectorChanged(Detector det) { + + // in here, make empty collections. + // since these are members and don't change + // should be able just keep adding some one + // to empty "event"....hopefully this speeds + // things up a lot. 
+ + System.out.println("EmptyEventsReadoutDriver:: Setting up base map"); + + baseCollectionMap.put("EcalHits",SimCalorimeterHit.class); + baseCollectionMap.put("HodoscopeHits",SimTrackerHit.class); + baseCollectionMap.put("MCParticle",MCParticle.class); + baseCollectionMap.put("TrackerHits",SimTrackerHit.class); + baseCollectionMap.put("TrackerHitsECal",SimTrackerHit.class); + + + } + + + @Override + public void process(EventHeader event) { + System.out.println("EmptyEventsReadoutDriver:: processing event!"); + System.out.println(event.toString()); + printCollections(event); + System.out.println("empty count = "+emptyCount); + if(!gotFirstRealEvent){ + System.out.println("EmptyEventsReadoutDriver:: Making the empty bunch"); + //make an empty lcsim event based on this, real event + // emptyEvent=makeEmptyEventFromMC(event); + //just get the metadata from first event + getMCMetaData(event); + gotFirstRealEvent=true; + } + + // check if we should add empty or continue + + if(emptyCount mcCollections = new ArrayList(mcEvent.getMetaData()); + for (LCMetaData mcCollectionMeta : mcCollections) { + String mcCollectionName = mcCollectionMeta.getName(); + // check to see if this collection is in the base map + // if so, copy collection, clear it, and put it in new event. 
+ if (baseCollectionMap.containsKey(mcCollectionName)){ + List collection =new ArrayList<> ((List) mcEvent.get(mcCollectionName)); + collection.clear(); //remove element + System.out.println("EmptyEventsReadoutDriver:: inserting collection "+mcCollectionName); + + this.putCollection(mcCollectionMeta, collection, lcsimEvent); + } + } + System.out.println("EmptyEventsReadoutDriver::returning empty event"); + return lcsimEvent; + } + + private EventHeader makeEmptyEvent(){ + int eventID=666666; + long time=(long)ReadoutDataManager.getCurrentTime(); + System.out.println("making an empty bunch with time = "+time); + //this was taken from evio/src/main/java/org/hps/evio/BaseEventBuilder.java + // Create a new LCSimEvent. + EventHeader lcsimEvent = + new BaseLCSimEvent( + ConditionsManager.defaultInstance().getRun(), + eventID, + ConditionsManager.defaultInstance().getDetector(), + time); + + // for (Map.Entry> thisEntry : baseCollectionMap.entrySet()) { + for (String name : baseCollectionNames) { + // String name = entry.getKey(); + // use the already obtained Metadata from the first MC event + // in order to get the flags right + System.out.println("EmptyEventsReadoutDriver:: inserting collection "+name); + for(LCMetaData mcCollectionMeta : mcCollections) { + // System.out.println("looping over collections from mcMetaData: "+mcCollectionMeta.getName()); + if (mcCollectionMeta.getName().equals(name)){ + List collection = new ArrayList<> (); + // System.out.println("EmptyEventsReadoutDriver:: inserting collection "+name); + this.putCollection(mcCollectionMeta, collection, lcsimEvent); + } + } + + } + + System.out.println("####################### this should be an empty event ###################"); + printCollections(lcsimEvent); + System.out.println("#############################################################################"); + return lcsimEvent; + + } + + protected void putCollection(LCMetaData meta, List entries, EventHeader event) { + String[] readout = 
meta.getStringParameters().get("READOUT_NAME"); + if (readout != null) { + event.put(meta.getName(), entries, meta.getType(), meta.getFlags(), readout[0]); + } else { + event.put(meta.getName(), entries, meta.getType(), meta.getFlags()); + } + if (this.getHistogramLevel() > HLEVEL_NORMAL) + System.out.println("Putting collection" + meta.getName() + " into event."); + } + + private void getMCMetaData(EventHeader mcEvent){ + mcCollections = new ArrayList(mcEvent.getMetaData()); + } + + private void clearEvent(EventHeader event){ + List evtCollections = new ArrayList(event.getMetaData()); + for (LCMetaData evtCollectionMeta : evtCollections) { + String colName=evtCollectionMeta.getName(); + List col=(List)event.get(colName); + if(col.size()>0){ + System.out.println("clearing collection "+colName+" of size = "+col.size()); + ((List)event.get(colName)).clear(); + System.out.println(".....new size = "+col.size()); + } + } + } + + private void printCollections(EventHeader event){ + List Collections = new ArrayList(event.getMetaData()); + for (LCMetaData CollectionMeta : Collections) { + String CollectionName = CollectionMeta.getName(); + // check to see if this collection is in the base map + // if so, copy collection, clear it, and put it in new event. + List collection =new ArrayList<> ((List) event.get(CollectionName)); + System.out.println("EmptyEventsReadoutDriver::printCollections:: "+CollectionName+" has "+collection.size()+" entries"); + } + } + + @Override + protected double getTimeDisplacement() { + return 0; + } + + @Override + protected double getTimeNeededForLocalOutput() { + // TODO: Probably should have some defined value - buffer seems to be filled enough from the ecal delay alone, though. 
+ return 0; + } +} diff --git a/digi/src/main/java/org/hps/digi/nospacing/GTPClusterNoSpacingReadoutDriver.java b/digi/src/main/java/org/hps/digi/nospacing/GTPClusterNoSpacingReadoutDriver.java new file mode 100755 index 000000000..539f28a7c --- /dev/null +++ b/digi/src/main/java/org/hps/digi/nospacing/GTPClusterNoSpacingReadoutDriver.java @@ -0,0 +1,390 @@ +package org.hps.digi.nospacing; + +import java.awt.event.ActionEvent; +import java.awt.event.ActionListener; +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; + +import org.hps.conditions.database.DatabaseConditionsManager; +import org.hps.readout.ReadoutDataManager; +import org.hps.readout.ReadoutDriver; +import org.hps.readout.util.collection.LCIOCollection; +import org.hps.readout.util.collection.LCIOCollectionFactory; +import org.hps.readout.util.collection.TriggeredLCIOData; +import org.hps.recon.ecal.cluster.ClusterType; +import org.hps.record.daqconfig2019.ConfigurationManager2019; +import org.hps.record.daqconfig2019.DAQConfig2019; +import org.hps.record.daqconfig2019.VTPConfig2019; +import org.hps.record.daqconfig.ConfigurationManager; +import org.hps.record.daqconfig.DAQConfig; +import org.hps.record.daqconfig.GTPConfig; +import org.lcsim.event.CalorimeterHit; +import org.lcsim.event.Cluster; +import org.lcsim.event.EventHeader; +import org.lcsim.event.base.BaseCluster; +import org.lcsim.geometry.Detector; +import org.lcsim.geometry.subdetector.HPSEcal3; +import org.lcsim.geometry.subdetector.HPSEcal3.NeighborMap; +import org.lcsim.lcio.LCIOConstants; + +/** + * Class GTPClusterNoSpacingReadoutDriver produces GTP cluster + * objects for use in the readout trigger simulation. It takes in + * {@link org.lcsim.event.CalorimeterHit CalorimeterHit} objects as + * input and generates clusters from these using the GTP algorithm. + * This algorithm works by selected all hits in the current + * clock-cycle (4 ns period) and comparing them to adjacent hits. 
If
 + * a given hit is an energy maximum compared to all adjacent hits in
 + * both the current clock-cycle, and a number of clock-cycles before
 + * and after the current cycle (defined through the variable {@link
 + * org.hps.digi.nospacing.GTPClusterNoSpacingReadoutDriver#temporalWindow
 + * temporalWindow} and set through the method {@link
 + * org.hps.digi.nospacing.GTPClusterNoSpacingReadoutDriver#setClusterWindow(int)
 + * setClusterWindow(int)}), then it is a seed hit so long as it also
 + * exceeds a certain minimum energy (defined through the variable
 + * {@link
 + * org.hps.digi.nospacing.GTPClusterNoSpacingReadoutDriver#seedEnergyThreshold
 + * seedEnergyThreshold} and set through the method {@link
 + * org.hps.digi.nospacing.GTPClusterNoSpacingReadoutDriver#setSeedEnergyThreshold(double)
 + * setSeedEnergyThreshold(double)}).
 + *
+ * Clusters are then output as objects of type {@link + * org.lcsim.event.Cluster Cluster} to the specified output + * collection. If the {@link + * org.hps.readout.ecal.updated.GTPClusterNoSpacingReadoutDriver#setWriteClusterCollection(boolean) + * setWriteClusterCollection(boolean)} is set to true, the clusters + * will also be persisted into the output LCIO file. + */ +public class GTPClusterNoSpacingReadoutDriver extends ReadoutDriver { + // ============================================================== + // ==== LCIO Collections ======================================== + // ============================================================== + + /** + * The name of the collection that contains the calorimeter hits + * from which clusters should be generated. + */ + private String inputCollectionName = "EcalCorrectedHits"; + /** + * The name of the collection into which generated clusters should + * be output. + */ + private String outputCollectionName = "EcalClustersGTP"; + + // ============================================================== + // ==== Driver Options ========================================== + // ============================================================== + + /** + * The time window used for cluster verification. A seed hit must + * be the highest energy hit within plus or minus this range in + * order to be considered a valid cluster. + */ + private int temporalWindow = 48; + /** + * The minimum energy needed for a hit to be considered as a seed + * hit candidate. + */ + private double seedEnergyThreshold = 0.050; + /** + * The local time for the driver. This starts at 2 ns due to a + * quirk in the timing of the {@link + * org.hps.readout.ecal.updated.EcalReadoutDriver + * EcalReadoutDriver}. + */ + private double localTime = 0.0; + /** + * The length of time by which objects produced by this driver + * are shifted due to the need to buffer data from later events. + * This is calculated automatically. 
+ */ + private double localTimeDisplacement = 0; + + // ============================================================== + // ==== Driver Parameters ======================================= + // ============================================================== + + /** + * An object which can provide, given an argument cell ID, a map + * of cell IDs that are physically adjacent to the argument ID. + * This is used to determine adjacency for energy comparisons in + * the clustering algorithm. + */ + private NeighborMap neighborMap; + + private HPSEcal3 calorimeterGeometry = null; + + private boolean checkInputStatus=false; //don't check status if running on non-spaced events. + + /** + * Sets whether or not the DAQ configuration is applied into the driver + * the EvIO data stream or whether to read the configuration from data files. + * + * @param state - true indicates that the DAQ configuration is + * applied into the readout system, and false that it + * is not applied into the readout system. + */ + public void setDaqConfiguration2016AppliedintoReadout(boolean state) { + // If the DAQ configuration should be read, attach a listener + // to track when it updates. + if (state) { + ConfigurationManager.addActionListener(new ActionListener() { + @Override + public void actionPerformed(ActionEvent e) { + // Get the DAQ configuration. + DAQConfig daq = ConfigurationManager.getInstance(); + GTPConfig config = daq.getGTPConfig(); + + // Load the DAQ settings from the configuration manager. + seedEnergyThreshold = config.getSeedEnergyCutConfig().getLowerBound(); + } + }); + } + } + + /** + * Sets whether or not the DAQ configuration is applied into the driver + * the EvIO data stream or whether to read the configuration from data files. + * + * @param state - true indicates that the DAQ configuration is + * applied into the readout system, and false that it + * is not applied into the readout system. 
+ */ + public void setDaqConfigurationAppliedintoReadout(boolean state) { + // If the DAQ configuration should be read, attach a listener + // to track when it updates. + if (state) { + ConfigurationManager2019.addActionListener(new ActionListener() { + @Override + public void actionPerformed(ActionEvent e) { + // Get the DAQ configuration. + DAQConfig2019 daq = ConfigurationManager2019.getInstance(); + VTPConfig2019 config = daq.getVTPConfig(); + + // Load the DAQ settings from the configuration manager. + seedEnergyThreshold = config.getEcalClusterSeedThr(); + temporalWindow = config.getEcalClusterHitDT(); + } + }); + } + } + + @Override + public void detectorChanged(Detector etector) { + // Get the calorimeter data object. + //HPSEcal3 ecal = (HPSEcal3) DatabaseConditionsManager.getInstance().getDetectorObject().getSubdetector("Ecal"); + calorimeterGeometry = (HPSEcal3) DatabaseConditionsManager.getInstance().getDetectorObject().getSubdetector("Ecal"); + if(calorimeterGeometry == null) { + throw new IllegalStateException("Error: Calorimeter geometry data object not defined."); + } + + // Get the calorimeter hit neighbor map. + neighborMap = calorimeterGeometry.getNeighborMap(); + if(neighborMap == null) { + throw new IllegalStateException("Error: Calorimeter hit neighbor map is not defined."); + } + } + + @Override + public void process(EventHeader event) { + // Check the data management driver to determine whether the + // input collection is available or not. + if(checkInputStatus&&!ReadoutDataManager.checkCollectionStatus(inputCollectionName, ReadoutDataManager.getCurrentTime() +192.0)) { + // System.out.println("Skipping GTP Readout with because collection doesn't exist at "+(ReadoutDataManager.getCurrentTime() + 192.0)); + return; + } + + // Get the hits that occur during the present clock-cycle, as + // well as the hits that occur in the verification window + // both before and after the current clock-cycle. + // TODO: Simplify this? 
+ Collection allHits = ReadoutDataManager.getData(ReadoutDataManager.getCurrentTime(), ReadoutDataManager.getCurrentTime() + 192.0, inputCollectionName, CalorimeterHit.class); + // Collection foreHits = ReadoutDataManager.getData(ReadoutDataManager.getCurrentTime() - temporalWindow, ReadoutDataManager.getCurrentTime(), inputCollectionName, CalorimeterHit.class); + //Collection postHits = ReadoutDataManager.getData(ReadoutDataManager.getCurrentTime() + 4.0, ReadoutDataManager.getCurrentTime() + temporalWindow + 4.0, inputCollectionName, CalorimeterHit.class); + + + // List allHits = new ArrayList(seedCandidates.size() + foreHits.size() + postHits.size()); + + //allHits.addAll(foreHits); + //allHits.addAll(seedCandidates); + //allHits.addAll(postHits); + // System.out.println(this.getClass().getName()+":: "+inputCollectionName+":: local time = "+ReadoutDataManager.getCurrentTime()+" number of seeds = "+allHits.size()); + + // Store newly created clusters. + List gtpClusters = new ArrayList(); + + // Iterate over all seed hit candidates. + seedLoop: + for(CalorimeterHit seedCandidate : allHits) { + // A seed candidate must meet a minimum energy cut to be + // considered for clustering. + if(seedCandidate.getRawEnergy() < seedEnergyThreshold) { + continue seedLoop; + } + + // Collect other hits that are adjacent to the seed hit + // and may be a part of the cluster. + List clusterHits = new ArrayList(); + + // Iterate over all other hits in the clustering window + // and check that the seed conditions are met for the + // seed candidate. Note that all hits are properly within + // the clustering time window by definition, so the time + // condition is not checked explicitly. + hitLoop: + for(CalorimeterHit hit : allHits) { + // If the hit is not adjacent to the seed hit, it can + // be ignored. 
+ if(!neighborMap.get(seedCandidate.getCellID()).contains(hit.getCellID())) { + continue hitLoop; + } + + // A seed hit must have the highest energy in its + // spatiotemporal window. If it is not, this is not a + // valid seed hit. + if(seedCandidate.getRawEnergy() < hit.getRawEnergy()) { + continue seedLoop; + } + + // Add the hit to the list of cluster hits. + clusterHits.add(hit); + } + + // If no adjacent hit was found that invalidates the seed + // condition, then the seed candidate is valid and a + // cluster should be formed. + gtpClusters.add(createBasicCluster(seedCandidate, clusterHits)); + } + + // Pass the clusters to the data management driver. + // System.out.println(this.getClass().getName()+":: number of GTP Clusters "+gtpClusters.size()); + ReadoutDataManager.addData(outputCollectionName, gtpClusters, Cluster.class); + } + + @Override + public void startOfData() { + // Define the output LCSim collection parameters. + LCIOCollectionFactory.setCollectionName(outputCollectionName); + LCIOCollectionFactory.setProductionDriver(this); + LCIOCollectionFactory.setFlags(1 << LCIOConstants.CLBIT_HITS); + LCIOCollection clusterCollectionParams = LCIOCollectionFactory.produceLCIOCollection(Cluster.class); + + // Instantiate the GTP cluster collection with the readout + // data manager. + localTimeDisplacement = temporalWindow + 4.0; + addDependency(inputCollectionName); + ReadoutDataManager.registerCollection(clusterCollectionParams, false); + } + + @Override + protected Collection> getOnTriggerData(double triggerTime) { + // If clusters are not to be output, return null. + if(!isPersistent()) { return null; } + + // Create a list to store the on-trigger collections. There + // are two collections outputs for this driver - the clusters + // and the cluster hits. Unlike other drivers, the clusterer + // must handle its own output because the manager does not + // know that it must also specifically output the hits from + // each cluster as well. 
+ List> collectionsList = new ArrayList>(2); + + // Define the LCIO collection settings for the clusters. + LCIOCollectionFactory.setCollectionName(outputCollectionName); + LCIOCollectionFactory.setProductionDriver(this); + LCIOCollectionFactory.setFlags(1 << LCIOConstants.CLBIT_HITS); + LCIOCollection clusterCollectionParams = LCIOCollectionFactory.produceLCIOCollection(Cluster.class); + + // Define the LCIO collection settings for the cluster hits. + int hitFlags = 0; + hitFlags += 1 << LCIOConstants.RCHBIT_TIME; + hitFlags += 1 << LCIOConstants.RCHBIT_LONG; + LCIOCollectionFactory.setCollectionName("EcalClustersGTPSimHits"); + LCIOCollectionFactory.setProductionDriver(this); + LCIOCollectionFactory.setFlags(hitFlags); + LCIOCollectionFactory.setReadoutName(calorimeterGeometry.getReadout().getName()); + LCIOCollection clusterHitsCollectionParams = LCIOCollectionFactory.produceLCIOCollection(CalorimeterHit.class); + + // Get the output time range for clusters. This is either the + // user defined output range, or the default readout window + // that is defined by the readout data manager. + double startTime; + if(Double.isNaN(getReadoutWindowBefore())) { startTime = triggerTime - ReadoutDataManager.getTriggerOffset(); } + else { startTime = triggerTime - getReadoutWindowBefore(); } + + double endTime; + if(Double.isNaN(getReadoutWindowAfter())) { endTime = startTime + ReadoutDataManager.getReadoutWindow(); } + else { endTime = triggerTime + getReadoutWindowAfter(); } + + // Get the cluster data and populate a list of cluster hits. + Collection clusters = ReadoutDataManager.getData(startTime, endTime, outputCollectionName, Cluster.class); + List clusterHits = new ArrayList(); + for(Cluster cluster : clusters) { + clusterHits.addAll(cluster.getCalorimeterHits()); + } + + // Create the LCIO on-trigger data lists. 
+ TriggeredLCIOData clusterHitData = new TriggeredLCIOData(clusterHitsCollectionParams); + clusterHitData.getData().addAll(clusterHits); + collectionsList.add(clusterHitData); + + TriggeredLCIOData clusterData = new TriggeredLCIOData(clusterCollectionParams); + clusterData.getData().addAll(clusters); + collectionsList.add(clusterData); + + // Return the on-trigger data. + return collectionsList; + } + + @Override + protected double getTimeDisplacement() { + return localTimeDisplacement; + } + + @Override + protected double getTimeNeededForLocalOutput() { + return 0; + } + + /** + * Creates a new cluster object from a seed hit and list of hits. + * @param seedHit - The seed hit of the new cluster. + * @param hits - The hits for the new cluster. + * @return Returns a {@link org.lcsim.event.Cluster Cluster} + * object with the specified properties. + */ + private static final Cluster createBasicCluster(CalorimeterHit seedHit, List hits) { + BaseCluster cluster = new BaseCluster(); + cluster.setType(ClusterType.GTP.getType()); + cluster.addHit(seedHit); + cluster.setPosition(seedHit.getDetectorElement().getGeometry().getPosition().v()); + cluster.setNeedsPropertyCalculation(false); + cluster.addHits(hits); + return cluster; + } + + /** + * Sets the size of the hit verification temporal window. Note + * that this defines the size of the window in one direction, so + * the full time window will be (2 * clusterWindow)+ + * 1 clock-cycles in length. (i.e., it will be a length of + * clusterWindow before the seed hit, a length of + * clusterWindow after the seed hit, plus the cycle + * that includes the seed hit.) Time length is in clock-cycles. + * @param value - The number of clock-cycles around the hit in + * one direction. + */ + public void setClusterWindow(int value) { + temporalWindow = value * 4; + } + + /** + * Sets the minimum seed energy needed for a hit to be considered + * for forming a cluster. 
This is the seed energy lower bound + * trigger cut and is in units of GeV. + * @param value - The minimum cluster seed energy in GeV. + */ + public void setSeedEnergyThreshold(double value) { + seedEnergyThreshold = value; + } +} diff --git a/digi/src/main/java/org/hps/digi/nospacing/HodoscopeDigiWithPulseNoSpacingReadoutDriver.java b/digi/src/main/java/org/hps/digi/nospacing/HodoscopeDigiWithPulseNoSpacingReadoutDriver.java new file mode 100644 index 000000000..57103f85e --- /dev/null +++ b/digi/src/main/java/org/hps/digi/nospacing/HodoscopeDigiWithPulseNoSpacingReadoutDriver.java @@ -0,0 +1,224 @@ +package org.hps.digi.nospacing; + +import java.awt.event.ActionEvent; +import java.awt.event.ActionListener; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Map; +import java.util.Set; + +import org.hps.readout.ReadoutDriver; +import org.hps.conditions.database.DatabaseConditionsManager; +import org.hps.conditions.hodoscope.HodoscopeChannel; +import org.hps.conditions.hodoscope.HodoscopeCalibration; +import org.hps.conditions.hodoscope.HodoscopeCalibration.HodoscopeCalibrationCollection; +import org.hps.conditions.hodoscope.HodoscopeChannel.HodoscopeChannelCollection; +import org.hps.conditions.hodoscope.HodoscopeGain; +import org.hps.conditions.hodoscope.HodoscopeGain.HodoscopeGainCollection; +import org.hps.conditions.hodoscope.HodoscopeTimeShift; +import org.hps.conditions.hodoscope.HodoscopeTimeShift.HodoscopeTimeShiftCollection; +import org.hps.readout.ReadoutTimestamp; +import org.lcsim.event.RawTrackerHit; +import org.lcsim.geometry.Detector; +import org.lcsim.geometry.subdetector.Hodoscope_v1; + +import org.hps.conditions.hodoscope.HodoscopeConditions; + +import org.hps.record.daqconfig2019.ConfigurationManager2019; +import org.hps.record.daqconfig2019.DAQConfig2019; +import org.hps.record.daqconfig2019.FADCConfigHodo2019; + +/** + * Class HodoscopeDigitizationWithPulserDataMergingReadoutDriver is an + * implementation of the {@link + * 
org.hps.digi.CalDigiWithPulserNoSpacingReadoutDriver} for a subdetector of type {@link + * org.lcsim.geometry.subdetector.Hodoscope_v1 Hodoscope_v1}. It + * handles all of the hodoscope-specific functions needed by the + * superclass. + * + * @author Tongtong Cao + */ +public class HodoscopeDigiWithPulseNoSpacingReadoutDriver extends CalDigiWithPulserNoSpacingReadoutDriver { + // The DAQ configuration manager for FADC parameters. + private FADCConfigHodo2019 config = new FADCConfigHodo2019(); + private boolean configStat = false; // Indicates if DAQ configuration is loaded + + // The number of nanoseconds in a clock-cycle (sample). + private static final int nsPerSample = 4; + + /** Stores the set of all channel IDs for the hodoscope. */ + private Set channelIDSet = new HashSet(); + /** Maps hodoscope channels to the gain for that channel. */ + private Map channelToGainsMap = new HashMap(); + /** Maps hodoscope channels to the time shifts for that channel. */ + private Map channelToTimeShiftsMap = new HashMap(); + /** Maps hodoscope channels to the noise sigma and pedestals for that channel. */ + private Map channelToCalibrationsMap = new HashMap(); + /** Factor for gain conversion from self-define-unit/ADC to MeV/ADC. */ + private double factorGainConversion = 0.000833333; + /** Gain scaling factor for raw energy (self-defined unit) of FADC hits. + * In DAQ configuration, gains are scaled by the gain scaling factor for two-hole tiles. + * Such gains from DAQ configuration should be divided by the factor. + */ + + private HodoscopeConditions hodoConditions = null; + + public HodoscopeDigiWithPulseNoSpacingReadoutDriver() { + // Set the default values for each subdetector-dependent + // parameter. 
+ setGeometryName("Hodoscope"); + + setInputHitCollectionName("HodoscopeHits"); + setOutputHitCollectionName("HodoscopeRawHits"); + setTruthRelationsCollectionName("HodoscopeTruthRelations"); + setTriggerPathTruthRelationsCollectionName("HodoscopeTriggerPathTruthRelations"); + setReadoutHitCollectionName("HodoscopeReadoutHits"); + + setNumberSamplesAfter(10); + setNumberSamplesBefore(6); + setPulseTimeParameter(4.0); + setPhotoelectronsPerMeV(10.0); + + setIntegrationThreshold(12); + } + + /** + * Sets whether or not the DAQ configuration is applied into the driver + * the EvIO data stream or whether to read the configuration from data files. + * + * @param state - true indicates that the DAQ configuration is + * applied into the readout system, and false that it + * is not applied into the readout system. + */ + public void setDaqConfigurationAppliedintoReadout(boolean state) { + // If the DAQ configuration should be read, attach a listener + // to track when it updates. + if (state) { + ConfigurationManager2019.addActionListener(new ActionListener() { + @Override + public void actionPerformed(ActionEvent e) { + // Get the DAQ configuration. + DAQConfig2019 daq = ConfigurationManager2019.getInstance(); + + // Load the DAQ settings from the configuration manager. + numSamplesAfter = daq.getHodoFADCConfig().getNSA() / nsPerSample; + numSamplesBefore = daq.getHodoFADCConfig().getNSB() / nsPerSample; + readoutWindow = daq.getHodoFADCConfig().getWindowWidth() / nsPerSample; + pulserDataWindow = readoutWindow; + + // Get the FADC configuration. + config = daq.getHodoFADCConfig(); + configStat = true; + } + }); + } + + } + + + @Override + public void detectorChanged(Detector detector) { + // Get a copy of the calorimeter conditions for the detector. + hodoConditions = DatabaseConditionsManager.getInstance().getHodoConditions(); + + // Populate the channel ID collections. + populateChannelCollections(); + + // Run the superclass method. 
+ super.detectorChanged(detector); + } + + @Override + protected Set getChannelIDs() { + return channelIDSet; + } + + @Override + protected Long getID(RawTrackerHit hit) { + return Long.valueOf(hodoConditions.getChannels().findGeometric(hit.getCellID()).getChannelId().intValue()); + } + + @Override + protected double getGainConditions(long channelID) { + if(channelToGainsMap.containsKey(Long.valueOf(channelID))) { + return channelToGainsMap.get(Long.valueOf(channelID)).getGain() * factorGainConversion; + } else { + throw new IllegalArgumentException("No gain conditions exist for hodoscope channel ID \"" + channelID + "\"."); + } + } + + @Override + protected double getNoiseConditions(long channelID) { + if(channelToCalibrationsMap.containsKey(Long.valueOf(channelID))) { + return channelToCalibrationsMap.get(Long.valueOf(channelID)).getNoise(); + } else { + throw new IllegalArgumentException("No noise conditions exist for hodoscope channel ID \"" + channelID + "\"."); + } + } + + @Override + protected double getPedestalConditions(long channelID) { + if (channelToCalibrationsMap.containsKey(Long.valueOf(channelID))) { + return channelToCalibrationsMap.get(Long.valueOf(channelID)).getPedestal(); + } else { + throw new IllegalArgumentException( + "No pedestal conditions exist for hodoscope channel ID \"" + channelID + "\"."); + } + } + + @Override + protected double getTimeShiftConditions(long channelID) { + if(channelToTimeShiftsMap.containsKey(Long.valueOf(channelID))) { + return channelToTimeShiftsMap.get(Long.valueOf(channelID)).getTimeShift(); + } else { + throw new IllegalArgumentException("No time shift conditions exist for hodoscope channel ID \"" + channelID + "\"."); + } + } + + @Override + protected int getTimestampFlag() { + return ReadoutTimestamp.SYSTEM_HODOSCOPE; + } + + /** + * Populates the channel ID set and maps all existing channels to + * their respective conditions. 
+ */ + private void populateChannelCollections() { + // Load the conditions database and get the hodoscope channel + // collection data. + final DatabaseConditionsManager conditions = DatabaseConditionsManager.getInstance(); + final HodoscopeGainCollection gains = conditions.getCachedConditions(HodoscopeGainCollection.class, "hodo_gains").getCachedData(); + final HodoscopeChannelCollection channels = conditions.getCachedConditions(HodoscopeChannelCollection.class, "hodo_channels").getCachedData(); + final HodoscopeTimeShiftCollection timeShifts = conditions.getCachedConditions(HodoscopeTimeShiftCollection.class, "hodo_time_shifts").getCachedData(); + final HodoscopeCalibrationCollection calibrations = conditions.getCachedConditions(HodoscopeCalibrationCollection.class, "hodo_calibrations").getCachedData(); + + // Map the gains to channel IDs. + for(HodoscopeGain gain : gains) { + channelToGainsMap.put(Long.valueOf(gain.getChannelId().intValue()), gain); + } + + // Map the pedestals and noise to channel IDs. + for(HodoscopeCalibration calibration : calibrations) { + channelToCalibrationsMap.put(Long.valueOf(calibration.getChannelId().intValue()), calibration); + } + + // Map time shifts to channel IDs. + for(HodoscopeTimeShift timeShift : timeShifts) { + channelToTimeShiftsMap.put(Long.valueOf(timeShift.getChannelId().intValue()), timeShift); + } + + // Store the set of all channel IDs. + for(HodoscopeChannel channel : channels) { + channelIDSet.add(Long.valueOf(channel.getChannelId().intValue())); + } + } + + /** + * Sets factor for gain conversion from self-defined unit/ADC to MeV/ADC + * @param factor - factor for gain conversion from self-defined-unit/ADC to MeV/ADC. 
+ */ + public void setFactorGainConversion(double factor) { + factorGainConversion = factor; + } +} diff --git a/digi/src/main/java/org/hps/digi/nospacing/HodoscopePatternNoSpacingDriver.java b/digi/src/main/java/org/hps/digi/nospacing/HodoscopePatternNoSpacingDriver.java new file mode 100644 index 000000000..25ae76ed7 --- /dev/null +++ b/digi/src/main/java/org/hps/digi/nospacing/HodoscopePatternNoSpacingDriver.java @@ -0,0 +1,436 @@ +package org.hps.digi.nospacing; + +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; + +import java.util.Map; + +import java.awt.Point; +import java.awt.event.ActionEvent; +import java.awt.event.ActionListener; +import java.util.ArrayList; +import java.util.List; + +import org.hps.conditions.database.DatabaseConditionsManager; +import org.hps.conditions.hodoscope.HodoscopeChannel; +import org.hps.conditions.hodoscope.HodoscopeChannel.HodoscopeChannelCollection; +import org.hps.readout.ReadoutDataManager; +import org.hps.readout.ReadoutDriver; +import org.hps.readout.util.collection.LCIOCollection; +import org.hps.readout.util.collection.LCIOCollectionFactory; +import org.hps.record.daqconfig2019.ConfigurationManager2019; +import org.hps.record.daqconfig2019.DAQConfig2019; +import org.hps.record.daqconfig2019.VTPConfig2019; +import org.hps.readout.util.HodoscopePattern; +import org.lcsim.event.CalorimeterHit; +import org.lcsim.event.EventHeader; +import org.lcsim.geometry.Detector; + +/** + * Class HodoscopePatternReadoutDriver produces hodoscope pattern + * objects for Ecal-hodo matching in the trigger simulation. Persistency of Hodo + * FADC hits is persistentTime. On the other hand, hodo FADC hits + * is earlier to enter the trigger system than Ecal by + * timeEarlierThanEcal Therefore, for each clock-cycle, FADC hits + * in [localTime - (persistentTime - timeEarlierThanEcal), localTime + + * timeEarlierThanEcal + 4] are taken into account to generate hodoscope + * patterns for all layers. 
+ */ +public class HodoscopePatternNoSpacingDriver extends ReadoutDriver { + /** Maps hodoscope channel IDs to channels. */ + private Map channelMap = new HashMap(); + + /** + * The name of the collection that contains the hodo FADC hits, which raw energy + * is self-defined. Through the hodo FADC hits, hodoscope pattern is generated. + */ + private String inputCollectionName = "HodoscopeCorrectedHits"; + /** + * The name of the collection into which generated hodoscope patterns for all + * four layers should be output. + */ + private String outputCollectionName = "HodoscopePatterns"; + + /** + * The local time for the driver. + */ + private double localTime = 0.0; + + /** + * Hodoscope FADC hit cut + */ + private double fADCHitThreshold = 1.0; + + /** + * Hodoscope tilt/cluster hit cut + */ + private double hodoHitThreshold = 200.0; + + /** + * Gain scaling factor for hits at two-hole tiles. + * Gains from database need to be scaled by the factor + * Gains in the DAQ configuration have been scaled by the factor. + */ + private double gainFactor = 1.25 / 2; + + /** + * Persistent time for hodoscope FADC hit in unit of ns + */ + private double persistentTime = 60.0; + + /** + * Time for hodoscope FADC hits earlier to enter the trigger system than Ecal + * with unit of ns + */ + private double timeEarlierThanEcal = 0.0; + + /** + * The length of time by which objects produced by this driver are shifted due + * to the need to buffer data from later events. This is calculated + * automatically. 
Hodo FADC hits enter the trigger system earlier than Ecal hits + * by timeEarlierThanEcal + */ + private double localTimeDisplacement = 0.0; + + /** + * According to setup in database, index for hodoscope layers are expressed as + * (layer+1)*y + */ + public static final int TopLayer1 = 1; + public static final int TopLayer2 = 2; + public static final int BotLayer1 = -1; + public static final int BotLayer2 = -2; + + /** + * List for 4 layers; + */ + private List layerList = new ArrayList<>(4); + + /** + * List for 8 (x, hole) points of each layer + */ + private List xHolePointList = new ArrayList<>(8); + + private boolean daqConfigurationAppliedintoReadout = false; + + /** + * Sets whether or not the DAQ configuration is applied into the driver + * the EvIO data stream or whether to read the configuration from data files. + * + * @param state - true indicates that the DAQ configuration is + * applied into the readout system, and false that it + * is not applied into the readout system. + */ + public void setDaqConfigurationAppliedintoReadout(boolean state) { + daqConfigurationAppliedintoReadout = state; + + // If the DAQ configuration should be read, attach a listener + // to track when it updates. + if (state) { + ConfigurationManager2019.addActionListener(new ActionListener() { + @Override + public void actionPerformed(ActionEvent e) { + // Get the DAQ configuration. + DAQConfig2019 daq = ConfigurationManager2019.getInstance(); + VTPConfig2019 config = daq.getVTPConfig(); + + // Load the DAQ settings from the configuration manager. + fADCHitThreshold = config.getHodoFADCHitThr(); + hodoHitThreshold = config.getHodoThr(); + persistentTime = config.getHodoDT(); + } + }); + } + } + + @Override + public void process(EventHeader event) { + + // Check the data management driver to determine whether the + // input collection is available or not. 
+ // if (!ReadoutDataManager.checkCollectionStatus(inputCollectionName, localTime + localTimeDisplacement)) { + // return; + // } + if (!ReadoutDataManager.checkCollectionStatus(inputCollectionName, ReadoutDataManager.getCurrentTime())) { + // System.out.println(this.getClass().getName()+":: "+inputCollectionName+" is not ready for this!"); + return; + } + + // Hodoscope FADC hits enter the trigger system earlier than Ecal by the time + // timeEarlierThanEcal . + // On the other hand, hodoscope FADC hits persist with a range of + // persistentTime. + // To build current hodo patterns, FADC hits between localTime - (persistentTime + // - timeEarlierThanEcal) and localTime + timeEarlierThanEcal + 4 are used. + // Collection fadcHits = ReadoutDataManager.getData( + // localTime - (persistentTime - timeEarlierThanEcal), localTime + timeEarlierThanEcal + 4.0, + // inputCollectionName, CalorimeterHit.class); + + Collection fadcHits = ReadoutDataManager.getData( + ReadoutDataManager.getCurrentTime() - (persistentTime - timeEarlierThanEcal), ReadoutDataManager.getCurrentTime() + timeEarlierThanEcal + 4.0, + inputCollectionName, CalorimeterHit.class); + + // System.out.println(this.getClass().getName()+":: found "+fadcHits.size()+" fadcHits"); + // Increment the local time. 
+ + // All hits over fadcHitThreshold are saved for each hole of each + // layer + Map>> energyListMapForLayerMap = new HashMap>>(); + + for (int layer : layerList) { + Map> energyListMap = new HashMap>(); + for (Point point : xHolePointList) { + energyListMap.put(point, new ArrayList()); + } + energyListMapForLayerMap.put(layer, energyListMap); + } + + for (CalorimeterHit hit : fadcHits) { + double energy = hit.getRawEnergy(); + if (energy > fADCHitThreshold) { + Long cellID = hit.getCellID(); + int layer = channelMap.get(cellID).getLayer(); + int y = channelMap.get(cellID).getIY(); + int x = channelMap.get(cellID).getIX(); + int hole = channelMap.get(cellID).getHole(); + + Point point = new Point(x, hole); + // Energy of hits is scaled except hits at tiles 0 and 4 + if(x == 0 || x == 4) energyListMapForLayerMap.get((layer + 1) * y).get(point).add(energy); + else { + // Gains in the DAQ configuration has been scaled by the factor. + if(daqConfigurationAppliedintoReadout) energyListMapForLayerMap.get((layer + 1) * y).get(point).add(energy); + else energyListMapForLayerMap.get((layer + 1) * y).get(point).add(energy * gainFactor); + } + } + } + + //Get maximum of energy in lists for each hole of each layer + Map> maxEnergyMapForLayerMap = new HashMap>(); + for (int layer : layerList) { + Map maxEnergyMap = new HashMap<>(); + for (Point point : xHolePointList) { + if(energyListMapForLayerMap.get(layer).get(point).size() != 0) + maxEnergyMap.put(point, Collections.max(energyListMapForLayerMap.get(layer).get(point))); + else + maxEnergyMap.put(point, 0.); + + } + maxEnergyMapForLayerMap.put(layer, maxEnergyMap); + } + + //Hodoscope patterns for all layers + //Order of list: TopLayer1, TopLayer2, BotLayer1, BotLayer2 + List hodoPatterns = new ArrayList<>(4); + + // Flag to determine if a pattern list at the current clock-cycle is added into data manager + boolean flag = false; + + for (int i = 0; i < 4; i++) { + HodoscopePattern pattern = new HodoscopePattern(); + + Map 
maxEnergyMap = maxEnergyMapForLayerMap.get(layerList.get(i)); + + if (maxEnergyMap.get(xHolePointList.get(0)) > hodoHitThreshold) { + pattern.setHitStatus(HodoscopePattern.HODO_LX_1, true); + flag = true; + } + if (maxEnergyMap.get(xHolePointList.get(1)) + maxEnergyMap.get(xHolePointList.get(2)) > hodoHitThreshold) { + pattern.setHitStatus(HodoscopePattern.HODO_LX_2, true); + flag = true; + } + if (maxEnergyMap.get(xHolePointList.get(3)) + maxEnergyMap.get(xHolePointList.get(4)) > hodoHitThreshold) { + pattern.setHitStatus(HodoscopePattern.HODO_LX_3, true); + flag = true; + } + if (maxEnergyMap.get(xHolePointList.get(5)) + maxEnergyMap.get(xHolePointList.get(6)) > hodoHitThreshold) { + pattern.setHitStatus(HodoscopePattern.HODO_LX_4, true); + flag = true; + } + if (maxEnergyMap.get(xHolePointList.get(7)) > hodoHitThreshold) { + pattern.setHitStatus(HodoscopePattern.HODO_LX_5, true); + flag = true; + } + if (maxEnergyMap.get(xHolePointList.get(0)) + maxEnergyMap.get(xHolePointList.get(1)) + + maxEnergyMap.get(xHolePointList.get(2)) > hodoHitThreshold + && maxEnergyMap.get(xHolePointList.get(0)) != 0 + && (maxEnergyMap.get(xHolePointList.get(1)) != 0 || maxEnergyMap.get(xHolePointList.get(2)) != 0)) { + pattern.setHitStatus(HodoscopePattern.HODO_LX_CL_12, true); + flag = true; + } + if (maxEnergyMap.get(xHolePointList.get(1)) + maxEnergyMap.get(xHolePointList.get(2)) + + maxEnergyMap.get(xHolePointList.get(3)) + + maxEnergyMap.get(xHolePointList.get(4)) > hodoHitThreshold + && (maxEnergyMap.get(xHolePointList.get(1)) != 0 || maxEnergyMap.get(xHolePointList.get(2)) != 0) + && (maxEnergyMap.get(xHolePointList.get(3)) != 0 || maxEnergyMap.get(xHolePointList.get(4)) != 0)) { + pattern.setHitStatus(HodoscopePattern.HODO_LX_CL_23, true); + flag = true; + } + if (maxEnergyMap.get(xHolePointList.get(3)) + maxEnergyMap.get(xHolePointList.get(4)) + + maxEnergyMap.get(xHolePointList.get(5)) + + maxEnergyMap.get(xHolePointList.get(6)) > hodoHitThreshold + && 
(maxEnergyMap.get(xHolePointList.get(3)) != 0 || maxEnergyMap.get(xHolePointList.get(4)) != 0) + && (maxEnergyMap.get(xHolePointList.get(5)) != 0 || maxEnergyMap.get(xHolePointList.get(6)) != 0)) { + pattern.setHitStatus(HodoscopePattern.HODO_LX_CL_34, true); + flag = true; + } + if (maxEnergyMap.get(xHolePointList.get(5)) + maxEnergyMap.get(xHolePointList.get(6)) + + maxEnergyMap.get(xHolePointList.get(7)) > hodoHitThreshold + && (maxEnergyMap.get(xHolePointList.get(5)) != 0 || maxEnergyMap.get(xHolePointList.get(6)) != 0) + && maxEnergyMap.get(xHolePointList.get(7)) != 0) { + pattern.setHitStatus(HodoscopePattern.HODO_LX_CL_45, true); + flag = true; + } + + hodoPatterns.add(pattern); + } + // System.out.println(this.getClass().getName()+":: found "+hodoPatterns.size()+" patterns"); + + // If at least one hodo tilt/cluster hit exists in any layer, then the pattern list is added into the data manager + if(flag == true){ + // System.out.println(this.getClass().getName()+":: at least one of the patterns was good!!!"); + ReadoutDataManager.addData(outputCollectionName, hodoPatterns, HodoscopePattern.class); + } + } + + @Override + public void startOfData() { + // Define the output LCSim collection parameters. + LCIOCollectionFactory.setCollectionName(outputCollectionName); + LCIOCollectionFactory.setProductionDriver(this); + LCIOCollection patternCollectionParams = LCIOCollectionFactory + .produceLCIOCollection(HodoscopePattern.class); + + // Register the hodoscope pattern collection with the readout + // data manager.
+ localTimeDisplacement = timeEarlierThanEcal + 4.0; + addDependency(inputCollectionName); + ReadoutDataManager.registerCollection(patternCollectionParams, false); + + initLists(); + } + + /** + * Initiate (layer, y) list and (x, hole) list + */ + private void initLists() { + // Add elements for layer list + layerList.add(TopLayer1); + layerList.add(TopLayer2); + layerList.add(BotLayer1); + layerList.add(BotLayer2); + + // Add elements for (x, hole) point list + xHolePointList.add(new Point(0, 0)); + xHolePointList.add(new Point(1, -1)); + xHolePointList.add(new Point(1, 1)); + xHolePointList.add(new Point(2, -1)); + xHolePointList.add(new Point(2, 1)); + xHolePointList.add(new Point(3, -1)); + xHolePointList.add(new Point(3, 1)); + xHolePointList.add(new Point(4, 0)); + } + + @Override + public void detectorChanged(Detector detector) { + // Populate the channel ID collections. + populateChannelCollections(); + } + + /** + * Populates the channel ID set and maps all existing channels to their + * respective conditions. + */ + private void populateChannelCollections() { + // Load the conditions database and get the hodoscope channel + // collection data. + final DatabaseConditionsManager conditions = DatabaseConditionsManager.getInstance(); + final HodoscopeChannelCollection channels = conditions + .getCachedConditions(HodoscopeChannelCollection.class, "hodo_channels").getCachedData(); + + // Map channels to channel IDs + for (HodoscopeChannel channel : channels) { + channelMap.put(Long.valueOf(channel.getChannelId().intValue()), channel); + } + } + + @Override + protected double getTimeDisplacement() { + return localTimeDisplacement; + } + + @Override + protected double getTimeNeededForLocalOutput() { + return 0; + } + + /** + * Sets the name of the input collection containing the objects of type + * {@link org.lcsim.event.CalorimeterHit CalorimeterHit} that are output by the + * digitization driver. + * + * @param collection - The name of the input hit collection. 
+ */ + public void setInputCollectionName(String collection) { + inputCollectionName = collection; + } + + /** + * Sets the name of the output collection containing the objects of type + * {@link org.hps.readout.hodoscope.HodoscopePattern HodoscopePattern} that are + * output by this driver. + * + * @param collection - The name of the output hodoscope pattern collection. + */ + public void setOutputCollectionName(String collection) { + outputCollectionName = collection; + } + + /** + * Sets hodoscope FADC hit threshold + * + * @param FADC hit threshold + */ + public void setFADCHitThreshold(double fADCHitThreshold) { + this.fADCHitThreshold = fADCHitThreshold; + } + + /** + * Sets hodoscope tilt/cluster hit threshold + * + * @param hodoscope tilt/cluster hit threshold + */ + public void setHodoHitThreshold(double hodoHitThreshold) { + this.hodoHitThreshold = hodoHitThreshold; + } + + /** + * Set persistency for hodoscope FADC hit in unit of ns + * + * @param persistency for hodoscope FADC hit in unit of ns + */ + public void setPersistentTime(double persistentTime) { + this.persistentTime = persistentTime; + } + + /** + * Set time for hodoscope FADC hits earlier to enter the trigger system than + * Ecal with unit of ns + * + * @param time for hodoscope FADC hits earlier to enter the trigger system than + * Ecal with unit of ns + */ + public void setTimeEarlierThanEcal(double timeEarlierThanEcal) { + this.timeEarlierThanEcal = timeEarlierThanEcal; + } + + /** + * Set gain factor for raw energy (self-defined unit) of FADC hits + * + * @param gain factor for raw energy (self-defined unit) of FADC hits + */ + public void setGainFactor(double gainFactor) { + this.gainFactor = gainFactor; + } +} diff --git a/digi/src/main/java/org/hps/digi/nospacing/HodoscopeRawConverterNoSpacingReadoutDriver.java b/digi/src/main/java/org/hps/digi/nospacing/HodoscopeRawConverterNoSpacingReadoutDriver.java new file mode 100755 index 000000000..337b06926 --- /dev/null +++ 
b/digi/src/main/java/org/hps/digi/nospacing/HodoscopeRawConverterNoSpacingReadoutDriver.java
@@ -0,0 +1,78 @@
+package org.hps.digi.nospacing;
+
+import java.awt.event.ActionEvent;
+import java.awt.event.ActionListener;
+
+import org.hps.readout.ReadoutDriver;
+//import org.hps.readout.RawConverterNoSpacingReadoutDriver;
+import org.hps.readout.rawconverter.AbstractMode3RawConverter;
+import org.hps.readout.rawconverter.HodoscopeReadoutMode3RawConverter;
+import org.hps.record.daqconfig2019.ConfigurationManager2019;
+import org.hps.record.daqconfig2019.DAQConfig2019;
+import org.lcsim.geometry.Detector;
+import org.lcsim.geometry.subdetector.Hodoscope_v1;
+
+/**
+ * HodoscopeRawConverterNoSpacingReadoutDriver is an
+ * implementation of {@link
+ * org.hps.digi.nospacing.RawConverterNoSpacingReadoutDriver
+ * RawConverterNoSpacingReadoutDriver} for the hodoscope subdetector.
+ *
+ * @see org.hps.readout.RawConverterReadoutDriver
+ */
+public class HodoscopeRawConverterNoSpacingReadoutDriver extends RawConverterNoSpacingReadoutDriver {
+    /**
+     * The converter object responsible for processing raw hits into
+     * proper {@link org.lcsim.event.CalorimeterHit CalorimeterHit}
+     * objects.
+     */
+    private HodoscopeReadoutMode3RawConverter converter = new HodoscopeReadoutMode3RawConverter();
+
+    /**
+     * Instantiates the driver with the correct default parameters
+     * (input collection "HodoscopeRawHits", output collection
+     * "HodoscopeCorrectedHits").
+     */
+    public HodoscopeRawConverterNoSpacingReadoutDriver() {
+        super("HodoscopeRawHits", "HodoscopeCorrectedHits");
+    }
+
+    /**
+     * Sets whether the DAQ configuration parsed from the EvIO data
+     * stream should be applied to this driver. When enabled, a
+     * listener updates the converter's NSA/NSB integration samples
+     * and hodoscope FADC configuration whenever the DAQ
+     * configuration changes.
+     *
+     * @param state - true indicates that the DAQ configuration is
+     *              applied into the readout system, and false that it
+     *              is not applied into the readout system.
+     */
+    public void setDaqConfigurationAppliedintoReadout(boolean state) {
+        // Track changes in the DAQ configuration.
+        if (state) {
+            ConfigurationManager2019.addActionListener(new ActionListener() {
+                @Override
+                public void actionPerformed(ActionEvent e) {
+                    // Get the DAQ configuration.
+                    DAQConfig2019 daq = ConfigurationManager2019.getInstance();
+
+                    // Load the DAQ settings from the configuration manager.
+                    getConverter().setNumberSamplesAfter(daq.getHodoFADCConfig().getNSA());
+                    getConverter().setNumberSamplesBefore(daq.getHodoFADCConfig().getNSB());
+
+                    // Get the FADC configuration.
+                    getConverter().setFADCConfigHodo2019(daq.getHodoFADCConfig());
+                }
+            });
+        }
+    }
+
+    @Override
+    protected AbstractMode3RawConverter getConverter() {
+        return converter;
+    }
+
+    @Override
+    protected String getSubdetectorReadoutName(Detector detector) {
+        // The readout name is taken from the "Hodoscope" subdetector
+        // in the current geometry.
+        Hodoscope_v1 hodoscopeGeometry = (Hodoscope_v1) detector.getSubdetector("Hodoscope");
+        return hodoscopeGeometry.getReadout().getName();
+    }
+
+    // No detector-dependent parameters are needed for the hodoscope.
+    @Override
+    protected void updateDetectorDependentParameters(Detector detector) { }
+}
diff --git a/digi/src/main/java/org/hps/digi/nospacing/NoSpacingTriggerDriver.java.donothing b/digi/src/main/java/org/hps/digi/nospacing/NoSpacingTriggerDriver.java.donothing
new file mode 100755
index 000000000..df2678722
--- /dev/null
+++ b/digi/src/main/java/org/hps/digi/nospacing/NoSpacingTriggerDriver.java.donothing
@@ -0,0 +1,159 @@
+package org.hps.digi.nospacing;
+
+import org.hps.readout.ReadoutDataManager;
+import org.hps.readout.ReadoutDriver;
+
+/**
+ * Class NoSpacingTriggerDriver is a special subclass of {@link
+ * org.hps.readout.ReadoutDriver ReadoutDriver} that is responsible
+ * for simulating trigger behavior. It implements additional behavior
+ * for handling trigger dead times and issuing triggers to the {@link
+ * org.hps.readout.ReadoutDataManager ReadoutDataManager}.

+ * Implementing drivers are responsible for checking if trigger + * conditions are met. In the event that they are, the method {@link + * org.hps.digi.nospacing.NoSpacingTriggerDriver#sendTrigger() sendTrigger()} should + * be used to issue the trigger to the data manager. This method will + * automatically check that the dead time condition is met, and will + * only issue the trigger command in the event that it is, so + * implementing drivers do not need to check this condition manually. + *

+ * For usage instructions, please see ReadoutDriver.
+ * @see org.hps.readout.ReadoutDriver
+ */
+public abstract class NoSpacingTriggerDriver extends ReadoutDriver {
+    /**
+     * singles trigger types
+     */
+    public static final String SINGLES0 = "singles0";
+    public static final String SINGLES1 = "singles1";
+    public static final String SINGLES2 = "singles2";
+    public static final String SINGLES3 = "singles3";
+
+    // Top/bottom selection flags for singles triggers.
+    public static final String TOP = "top";
+    public static final String BOT = "bot";
+    public static final String TOPBOT = "topbot";
+
+    // Pair trigger types.
+    public static final String PAIR0 = "pair0";
+    public static final String PAIR1 = "pair1";
+    public static final String PAIR2 = "pair2";
+    public static final String PAIR3 = "pair3";
+
+    // Pulser trigger type.
+    public static final String PULSER = "pulser";
+
+    // FEE trigger type.
+    public static final String FEE = "fee";
+
+    /**
+     * The amount of time that must pass after a trigger before a new
+     * trigger can be issued, in units of nanoseconds.
+     */
+    private double deadTime = 0.0;
+    /**
+     * The last time at which a trigger was issued to the data
+     * manager, in units of nanoseconds. NaN until the first trigger.
+     */
+    private double lastTrigger = Double.NaN;
+
+    /**
+     * Checks whether the trigger is currently in dead time or not.
+     * @return Returns true if the trigger is currently
+     * in dead time, and false if it is not and a
+     * trigger may be issued.
+     */
+    protected boolean isInDeadTime() {
+        if(Double.isNaN(lastTrigger)) { return false; }
+        else { return (lastTrigger + deadTime) > ReadoutDataManager.getCurrentTime(); }
+    }
+
+    // Persistence is not meaningful for trigger drivers; always throws.
+    @Override
+    protected boolean isPersistent() {
+        throw new UnsupportedOperationException();
+    }
+
+    /**
+     * Gets the dead time for this trigger.
+     * @return Returns the dead time in units of nanoseconds.
+     */
+    protected double getDeadTime() {
+        return deadTime;
+    }
+
+    /**
+     * Gets the time at which the last trigger occurred.
+     * @return Returns the last trigger time in units of nanoseconds,
+     * or as {@link java.lang.Double#NaN Double.NaN} if no trigger
+     * has occurred yet.
+     */
+    protected double getLastTriggerTime() {
+        return lastTrigger;
+    }
+
+    // Readout windows are not meaningful for trigger drivers; always throw.
+    @Override
+    protected double getReadoutWindowAfter() {
+        throw new UnsupportedOperationException();
+    }
+
+    @Override
+    protected double getReadoutWindowBefore() {
+        throw new UnsupportedOperationException();
+    }
+
+    /**
+     * Issues a trigger to the data manager so long as the trigger is
+     * not presently in dead time.
+     */
+    protected void sendTrigger() {
+        if(!isInDeadTime()) {
+            ReadoutDataManager.sendTrigger(this);
+            lastTrigger = ReadoutDataManager.getCurrentTime();
+        }
+    }
+
+    /**
+     * Issues a trigger to the data manager so long as the trigger is
+     * not presently in dead time.
+     * @param triggerType - The trigger type.
+     */
+    protected void sendTrigger(String triggerType) {
+        if(!isInDeadTime()) {
+            ReadoutDataManager.sendTrigger(this, triggerType);
+            lastTrigger = ReadoutDataManager.getCurrentTime();
+        }
+    }
+
+    /**
+     * Issues a trigger to the data manager so long as the trigger is
+     * not presently in dead time.
+     * @param triggerType - The trigger type.
+     * @param topBot - The top/bottom flag for singles triggers.
+     */
+    protected void sendTrigger(String triggerType, String topBot) {
+        if(!isInDeadTime()) {
+            ReadoutDataManager.sendTrigger(this, triggerType, topBot);
+            lastTrigger = ReadoutDataManager.getCurrentTime();
+        }
+    }
+
+    /**
+     * Sets the dead time for the trigger.
+     * @param samples - The amount of time (in events) before another
+     * trigger is allowed to occur; converted to nanoseconds using the
+     * beam bunch size.
+     */
+    public void setDeadTime(int samples) {
+        deadTime = samples * ReadoutDataManager.getBeamBunchSize();
+    }
+
+    // The setters below are not meaningful for trigger drivers; always throw.
+    @Override
+    public void setPersistent(boolean state) {
+        throw new UnsupportedOperationException();
+    }
+
+    @Override
+    public void setReadoutWindowAfter(double value) {
+        throw new UnsupportedOperationException();
+    }
+
+    @Override
+    public void setReadoutWindowBefore(double value) {
+        throw new UnsupportedOperationException();
+    }
+}
diff --git a/digi/src/main/java/org/hps/digi/nospacing/RawConverterNoSpacingReadoutDriver.java b/digi/src/main/java/org/hps/digi/nospacing/RawConverterNoSpacingReadoutDriver.java
new file mode 100755
index 000000000..a13db77be
--- /dev/null
+++ b/digi/src/main/java/org/hps/digi/nospacing/RawConverterNoSpacingReadoutDriver.java
@@ -0,0 +1,259 @@
+package org.hps.digi.nospacing;
+
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.List;
+import org.hps.readout.ReadoutDriver;
+import org.hps.readout.rawconverter.AbstractMode3RawConverter;
+import org.hps.readout.util.collection.LCIOCollectionFactory;
+import org.lcsim.event.CalorimeterHit;
+import org.lcsim.event.EventHeader;
+import org.lcsim.event.RawCalorimeterHit;
+import org.lcsim.geometry.Detector;
+import org.lcsim.lcio.LCIOConstants;
+import org.hps.readout.ReadoutDataManager;
+/**
+ * RawConverterNoSpacingReadoutDriver processes ADC hit data
+ * objects and converts them to energy hit objects. It serves as an
+ * interface to a {@link
+ * org.hps.readout.rawconverter.AbstractMode3RawConverter
+ * AbstractMode3RawConverter} object, where the actual conversion is
+ * performed.
+ *

+ * RawConverterNoSpacingReadoutDriver itself is abstract - it + * requires that implementing classes handle any subdetector-specific + * functionality. + */ +public abstract class RawConverterNoSpacingReadoutDriver extends ReadoutDriver { + /** + * Sets the name of the input {@link + * org.lcsim.event.RawCalorimeterHit RawCalorimeterHit} + * collection. + */ + private String inputCollectionName; + + /** + * Sets the name of the output {@link + * org.lcsim.event.CalorimeterHit CalorimeterHit} collection. + */ + private String outputCollectionName; + + /** + * Tracks the current local time in nanoseconds for this driver. + */ + private double localTime = 0.0; + + //size to look for hits in 4ns clock ticks + private double EVENT_WINDOW=48; + + /** + * Indicates whether channels that are marked as "bad" in the + * conditions database should be skipped when producing hits. + */ + protected boolean skipBadChannels = false; + + protected boolean checkInput = false; + + protected RawConverterNoSpacingReadoutDriver(String defaultInputCollectionName, String defaultOutputCollectionName) { + inputCollectionName = defaultInputCollectionName; + outputCollectionName = defaultOutputCollectionName; + } + + @Override + public final void detectorChanged(Detector detector) { + // Allow implementing drivers to catch the detector changed + // event, if needed. + updateDetectorDependentParameters(detector); + + // Update the converter. + getConverter().updateDetector(detector); + + // Update the readout name for the managed collection. + ReadoutDataManager.updateCollectionReadoutName(outputCollectionName, CalorimeterHit.class, getSubdetectorReadoutName(detector)); + } + + @Override + public final void process(EventHeader event) { + // Check the data management driver to determine whether the + // input collection is available or not. 
+ if(checkInput&&!ReadoutDataManager.checkCollectionStatus(inputCollectionName, localTime + 4.0)) { + // System.out.println(this.getClass().getName()+":: checkInput or Collection status Failed"); + return; + } + + // Get all of the raw hits in the current clock-cycle. + // Collection rawHits = ReadoutDataManager.getData(localTime, localTime + 4.0, inputCollectionName, RawCalorimeterHit.class); + Collection rawHits = ReadoutDataManager.getData(ReadoutDataManager.getCurrentTime(), ReadoutDataManager.getCurrentTime() + 4.0*EVENT_WINDOW, inputCollectionName, RawCalorimeterHit.class); + // System.out.println("RawConverter:: "+ inputCollectionName+" local time = "+localTime+" number of seeds = "+rawHits.size()); + // Increment the local time. + // localTime += 4.0; + + // Pass the raw hits to the raw converter to obtain proper + // calorimeter hits. In readout, raw hits are always Mode-3, + // so there is no need to check the form. + List newHits = new ArrayList(); + + for(RawCalorimeterHit hit : rawHits) { + // Convert the raw hit. + CalorimeterHit newHit = getConverter().convertHit(hit, 0.0); + + // If the hit is on a bad channel, and these are set to + // be skipped, ignore the hit. Otherwise, add it to the + // output list. + if(skipBadChannels && isBadChannel(newHit.getCellID())) { + continue; + } + + // Add the new hit. + newHits.add(newHit); + } + // System.out.println("RawConverter:: "+ outputCollectionName+" adding new hits with size = "+newHits.size()+" at time = "+localTime); + // Add the calorimeter hit collection to the data manager. + ReadoutDataManager.addData(outputCollectionName, newHits, CalorimeterHit.class); + // Increment the local time for real. + localTime += 4.0*125; + + } + + @Override + public void startOfData() { + // Set the LCIO flags for the output collection. Flags are + // set to store the hit time and hit position respectively. 
+ int flags = 0; + flags += 1 << LCIOConstants.RCHBIT_TIME; + flags += 1 << LCIOConstants.RCHBIT_LONG; + + // Define the LCSim collection parameters for this driver's + // output. + LCIOCollectionFactory.setCollectionName(outputCollectionName); + LCIOCollectionFactory.setProductionDriver(this); + LCIOCollectionFactory.setFlags(flags); + + // Set the dependencies for the driver and register its + // output collections with the data management driver. + addDependency(inputCollectionName); + + // Register the output collection. + ReadoutDataManager.registerCollection(LCIOCollectionFactory.produceLCIOCollection(CalorimeterHit.class), isPersistent(), + getReadoutWindowBefore(), getReadoutWindowAfter()); + } + + /** + * Gets the {@link org.hps.readout.ReadoutRawConverter + * ReadoutRawConverter} object used to convert hits for this + * subdetector. + * @return Returns the raw converter. + */ + protected abstract AbstractMode3RawConverter getConverter(); + + /** + * Gets the readout name for this subdetector from the geometry. + * @param detector - The geometry object. + * @return Returns the subdetector readout name. + */ + protected abstract String getSubdetectorReadoutName(Detector detector); + + @Override + protected final double getTimeDisplacement() { + return 0; + } + + @Override + protected final double getTimeNeededForLocalOutput() { + return 0; + } + + /** + * Indicates whether or not the channel on which a hit occurs is + * a "bad" channel according to the conditions database. + * @param hit - The hit to check. + * @return Returns true if the hit channel is + * flagged as "bad" and false otherwise. + * @throws UnsupportedOperationException Occurs if the + * subdetector represented by the driver does not support bad + * channel exclusion. 
+ */ + protected boolean isBadChannel(long channelID) { + throw new UnsupportedOperationException("Driver \"" + getClass().getSimpleName() + "\" does not support bad channel exclusion."); + } + + /** + * Updates any detector-specific parameters needed by the + * implementing class. + * @param detector - The current detector geometry. + */ + protected abstract void updateDetectorDependentParameters(Detector detector); + + /** + * Sets the name of the input collection containing the objects + * of type {@link org.lcsim.event.RawCalorimeterHit + * RawCalorimeterHit} that are output by the digitization driver. + * @param collection - The name of the input raw hit collection. + */ + public void setInputCollectionName(String collection) { + inputCollectionName = collection; + } + + /** + * Sets the number of integration samples that should be included + * in a pulse integral after the threshold-crossing event. + * @param samples - The number of samples, where a sample is a + * 4 ns clock-cycle. + */ + public void setNumberSamplesAfter(int samples) { + getConverter().setNumberSamplesAfter(4 * samples); + } + + /** + * Sets the number of integration samples that should be included + * in a pulse integral before the threshold-crossing event. + * @param samples - The number of samples, where a sample is a + * 4 ns clock-cycle. + */ + public void setNumberSamplesBefore(int samples) { + getConverter().setNumberSamplesBefore(4 * samples); + } + + /** + * Sets factor of unit conversion for returned value of the method + * AbstractBaseRawConverter::adcToEnergy(). + * @param factor of unit conversion + */ + public void setFactorUnitConversion(double factor) { + getConverter().setFactorUnitConversion(factor); + } + + /** + * Sets the name of the output collection containing the objects + * of type {@link org.lcsim.event.CalorimeterHit CalorimeterHit} + * that are output by this driver. + * @param collection - The name of the output hit collection. 
+ */ + public void setOutputCollectionName(String collection) { + outputCollectionName = collection; + } + + /** + * Indicates whether or not data from channels flagged as "bad" + * in the conditions system should be ignored. true + * indicates that they should be ignored, and false + * that they should not. + * @param state - true indicates that "bad" channels + * will be ignored and false that they will not. + * @throws UnsupportedOperationException Occurs if the + * subdetector represented by the driver does not support bad + * channel exclusion. + */ + public void setSkipBadChannels(boolean state) { + throw new UnsupportedOperationException("Driver \"" + getClass().getSimpleName() + "\" does not support bad channel exclusion."); + } + + /** + * Sets the size of the ADC buffer. This is needed for proper + * handling of Mode-3 hits in the raw converter. + * @param window - The buffer size in units of 4 ns clock-cycles. + */ + public void setReadoutWindow(int window) { + getConverter().setWindowSamples(window); + } +} diff --git a/digi/src/main/java/org/hps/digi/nospacing/SinglesTrigger2019NoSpacingReadoutDriver.java b/digi/src/main/java/org/hps/digi/nospacing/SinglesTrigger2019NoSpacingReadoutDriver.java new file mode 100644 index 000000000..bf7460e07 --- /dev/null +++ b/digi/src/main/java/org/hps/digi/nospacing/SinglesTrigger2019NoSpacingReadoutDriver.java @@ -0,0 +1,415 @@ +package org.hps.digi.nospacing; + +import java.util.Collection; +import java.util.List; +import java.awt.event.ActionEvent; +import java.awt.event.ActionListener; +import java.util.ArrayList; + +import org.hps.readout.ReadoutDataManager; +//import org.hps.digi.nospacing.NoSpacingTriggerDriver; +import org.hps.readout.TriggerDriver; +import org.hps.record.daqconfig2019.ConfigurationManager2019; +import org.hps.record.daqconfig2019.DAQConfig2019; +import org.hps.record.triggerbank.TriggerModule2019; +import org.lcsim.event.Cluster; +import org.lcsim.event.EventHeader; +import
org.lcsim.geometry.Detector; +import org.lcsim.geometry.subdetector.HPSEcal3; +import org.lcsim.util.aida.AIDA; + +import org.hps.readout.util.HodoscopePattern; + +import hep.aida.IHistogram1D; +import hep.aida.IHistogram2D; + +/** + * SinglesTrigger2019NoSpacingReadoutDriver simulates an HPS singles trigger + * for 2019 MC. It takes in clusters produced by the + * {@link org.hps.readout.ecal.updated.GTPClusterReadoutDriver + * GTPClusterReadoutDriver} and hodoscope patterns produced by the + * {@link HodoscopePatternReadoutDriver}, and perform the necessary trigger + * logic on them. If a trigger is detected, it is sent to the readout data + * manager so that a triggered readout event may be written. + */ +public class SinglesTrigger2019NoSpacingReadoutDriver extends TriggerDriver { + // ============================================================== + // ==== LCIO Collections ======================================== + // ============================================================== + /** + * Indicates singles trigger type. Corresponding DAQ configuration is accessed by DAQ + * configuration system, and applied into readout. + */ + private String triggerType = "singles3"; + + /** + * Indicates the name of the calorimeter geometry object. This is + * needed to allow access to the calorimeter channel listings. + */ + private String ecalGeometryName = "Ecal"; + /** + * Specifies the name of the LCIO collection containing the input + * GTP clusters that are used for triggering. + */ + private String inputCollectionNameEcal = "EcalClustersGTP"; + + private String inputCollectionNameHodo = "HodoscopePatterns"; + + // ============================================================== + // ==== Driver Options ========================================== + // ============================================================== + + /** + * Specifies the beam energy for the input data. This defines the + * limits of the energy trigger plots and has no further effect. 
+ */ + private double beamEnergy = 4.55; + /** + * Stores the trigger settings and performs trigger logic. + */ + private TriggerModule2019 triggerModule = new TriggerModule2019(); + + private double ecalTimeDisplacement = 20.0; //ns + private double hodoTimeDisplacement = 4.0; //ns + + boolean requireHodo=true; + + // ============================================================== + // ==== Driver Parameters ======================================= + // ============================================================== + + /** + * Tracks the current local time in nanoseconds for this driver. + */ + private double localTime = 0.0; + /** + * Stores a reference to the calorimeter subdetector model. This + * is needed to extract the crystal indices from the cell ID. + */ + private HPSEcal3 ecal = null; + /** + * Defines the size of an energy bin for trigger output plots. + */ + private static final double BIN_SIZE = 0.025; + + + // ============================================================== + // ==== AIDA Plots ============================================== + // ============================================================== + + private AIDA aida = AIDA.defaultInstance(); + private static final int NO_CUTS = 0; + private static final int WITH_CUTS = 1; + private IHistogram1D[] clusterSeedEnergy = new IHistogram1D[2]; + private IHistogram1D[] clusterHitCount = new IHistogram1D[2]; + private IHistogram1D[] clusterTotalEnergy = new IHistogram1D[2]; + private IHistogram2D[] clusterDistribution = new IHistogram2D[2]; + + /** + * Sets whether or not the DAQ configuration is applied into the driver + * the EvIO data stream or whether to read the configuration from data files. + * + * @param state - true indicates that the DAQ configuration is + * applied into the readout system, and false that it + * is not applied into the readout system. 
+ */ + public void setDaqConfigurationAppliedintoReadout(boolean state) { + // If the DAQ configuration should be read, attach a listener + // to track when it updates. + if (state) { + ConfigurationManager2019.addActionListener(new ActionListener() { + @Override + public void actionPerformed(ActionEvent e) { + // Get the DAQ configuration. + DAQConfig2019 daq = ConfigurationManager2019.getInstance(); + if(triggerType.contentEquals(SINGLES3)) triggerModule.loadDAQConfiguration(daq.getVTPConfig().getSingles3Config()); + else if(triggerType.equals(SINGLES2)) triggerModule.loadDAQConfiguration(daq.getVTPConfig().getSingles2Config()); + else if(triggerType.equals(SINGLES1)) triggerModule.loadDAQConfiguration(daq.getVTPConfig().getSingles1Config()); + else if(triggerType.equals(SINGLES0)) triggerModule.loadDAQConfiguration(daq.getVTPConfig().getSingles0Config()); + } + }); + } + } + + @Override + public void detectorChanged(Detector detector) { + // Get the calorimeter sub-detector. + org.lcsim.geometry.compact.Subdetector ecalSub = detector.getSubdetector(ecalGeometryName); + if(ecalSub instanceof HPSEcal3) { + ecal = (HPSEcal3) ecalSub; + } else { + throw new IllegalStateException("Error: Unexpected calorimeter sub-detector of type \"" + ecalSub.getClass().getSimpleName() + "; expected HPSEcal3."); + } + } + + @Override + public void process(EventHeader event) { + // Check that clusters are available for the trigger. 
+ Collection clusters = null; + Collection hodoPatterns = null; + ArrayList hodoPatternList = null; + // System.out.println(this.getClass().getName()+":: starting process"); + if(ReadoutDataManager.checkCollectionStatus(inputCollectionNameEcal, ReadoutDataManager.getCurrentTime()-ecalTimeDisplacement) && ReadoutDataManager.checkCollectionStatus(inputCollectionNameHodo, ReadoutDataManager.getCurrentTime()-hodoTimeDisplacement)) { + clusters = ReadoutDataManager.getData(ReadoutDataManager.getCurrentTime()-ecalTimeDisplacement, ReadoutDataManager.getCurrentTime() -ecalTimeDisplacement+ 192.0, inputCollectionNameEcal, Cluster.class); + hodoPatterns = ReadoutDataManager.getData(ReadoutDataManager.getCurrentTime()-hodoTimeDisplacement, ReadoutDataManager.getCurrentTime() -hodoTimeDisplacement+ 192.0, inputCollectionNameHodo, HodoscopePattern.class); + + // System.out.println(this.getClass().getName()+":: number of gtp clusters = "+clusters.size()); + // System.out.println(this.getClass().getName()+":: number of hodo patterns = "+hodoPatterns.size()); + if(clusters.size() == 0){ + // System.out.println(this.getClass().getName()+":: quitting because no gtp clusters"); + return; + } + + if( requireHodo&&hodoPatterns.size() == 0){ + // System.out.println(this.getClass().getName()+":: quitting because no hodo patterns"); + return; + } + hodoPatternList = new ArrayList<>(hodoPatterns); + + } else { + System.out.println(this.getClass().getName()+":: cluster or hodo collection doesn't exist"); + return; + } + + // Track whether or not a trigger was seen. + boolean triggered = false; + + // There is no need to perform the trigger cuts if the + // trigger is in dead time, as no trigger may be issued + // regardless of the outcome. + if(isInDeadTime()) { + System.out.println(this.getClass().getName()+":: I'm in deadtime ... 
bailing"); + return; + } + + // Record top/bot status for singles triggers + List topBot = new ArrayList(); + + // Plot the trigger distributions before trigger cuts are + // performed. + for(Cluster cluster : clusters) { + // Get the x and y indices. Note that LCSim meta data is + // not available during readout, so crystal indices must + // be obtained directly from the calorimeter geometry. + java.awt.Point ixy = ecal.getCellIndices(cluster.getCalorimeterHits().get(0).getCellID()); + + // Populate the uncut plots. + clusterSeedEnergy[NO_CUTS].fill(TriggerModule2019.getValueClusterSeedEnergy(cluster)); + clusterTotalEnergy[NO_CUTS].fill(TriggerModule2019.getValueClusterTotalEnergy(cluster)); + clusterHitCount[NO_CUTS].fill(TriggerModule2019.getClusterHitCount(cluster)); + clusterDistribution[NO_CUTS].fill(ixy.x, ixy.y); + + // Perform the hit count cut. + if(triggerModule.getCutEn(TriggerModule2019.CLUSTER_HIT_COUNT_LOW_EN) && !triggerModule.clusterHitCountCut(cluster)) { + // System.out.println(this.getClass().getName()+":: this cluster has too few hits ... continue"); + continue; + } + + // Perform the cluster energy cut. + if(triggerModule.getCutEn(TriggerModule2019.CLUSTER_TOTAL_ENERGY_LOW_EN) && !triggerModule.clusterTotalEnergyCutLow(cluster)) { + // System.out.println(this.getClass().getName()+":: this cluster has too low an energy ... continue"); + + continue; + } + + if(triggerModule.getCutEn(TriggerModule2019.CLUSTER_TOTAL_ENERGY_HIGH_EN) && !triggerModule.clusterTotalEnergyCutHigh(cluster)) { + // System.out.println(this.getClass().getName()+":: this cluster has too HIGH an energy ... continue"); + + continue; + } + // System.out.println(this.getClass().getName()+":: this cluster survived!"); + // In the setup calorimeter geometry, range of X coordinates is [-23, -1] and [1, 23]. + // The hardware uses cluster X coordinates [-22,0] and [1,23]. + int clusterX = ixy.x; + if(clusterX < 0) clusterX++; + + int clusterY = ixy.y; + + // XMin is at least 0. 
+ if(!triggerModule.getCutEn(TriggerModule2019.SINGLES_MOLLERMODE_EN)) { + if(triggerModule.getCutEn(TriggerModule2019.CLUSTER_XMIN_EN) && !triggerModule.clusterXMinCut(clusterX)) { + // System.out.println(this.getClass().getName()+":: trigger type = "+triggerType+" no trigger because cluster X failed"); + continue; + } + + // XMin cut has been applied. + if(triggerModule.getCutEn(TriggerModule2019.CLUSTER_PDE_EN) && !triggerModule.clusterPDECut(cluster, clusterX)) { + // System.out.println(this.getClass().getName()+":: trigger type = "+triggerType+" no trigger because cluster Energy vs X failed"); + continue; + } + } + + if(triggerModule.getCutEn(TriggerModule2019.SINGLES_L1L2ECAL_MATCHING_EN) && !triggerModule.geometryMatchingCut(clusterX, ixy.y, hodoPatternList)) { + // System.out.println(this.getClass().getName()+":: trigger type = "+triggerType+"no trigger because hodo matching failed"); + continue; + } + + //For 2021 update, Moller triggers + if(triggerModule.getCutEn(TriggerModule2019.SINGLES_MOLLERMODE_EN)) { + if(triggerModule.getCutEn(TriggerModule2019.SINGLES_XYMINMAX_EN) && !triggerModule.clusterXMinCut(clusterX)) { + continue; + } + if(triggerModule.getCutEn(TriggerModule2019.SINGLES_XYMINMAX_EN) && !triggerModule.clusterXMaxCut(clusterX)) { + continue; + } + if(triggerModule.getCutEn(TriggerModule2019.SINGLES_XYMINMAX_EN) && !triggerModule.clusterYMinCut(clusterY)) { + continue; + } + if(triggerModule.getCutEn(TriggerModule2019.SINGLES_XYMINMAX_EN) && !triggerModule.clusterYMaxCut(clusterY)) { + continue; + } + if(triggerModule.getCutEn(TriggerModule2019.CLUSTER_PDE_EN) && !triggerModule.clusterMollerPDECut(cluster, clusterX)) { + continue; + } + } + // System.out.println(this.getClass().getName()+":: found a trigger!!!"); + + // Note that a trigger occurred. + triggered = true; + + if(ixy.y > 0) topBot.add(TOP); + else topBot.add(BOT); + + // Populate the cut plots. 
+ clusterSeedEnergy[WITH_CUTS].fill(TriggerModule2019.getValueClusterSeedEnergy(cluster)); + clusterTotalEnergy[WITH_CUTS].fill(TriggerModule2019.getValueClusterTotalEnergy(cluster)); + clusterHitCount[WITH_CUTS].fill(TriggerModule2019.getClusterHitCount(cluster)); + clusterDistribution[WITH_CUTS].fill(ixy.x, ixy.y); + } + + if(triggered) { + boolean topStat = false; + boolean botStat = false; + if(topBot.contains(TOP)) topStat = true; + if(topBot.contains(BOT)) botStat = true; + // System.out.println(this.getClass().getName()+":: Sending Trigger"); + if(topStat && botStat) sendTrigger(triggerType, TOPBOT); + else if(topStat) sendTrigger(triggerType, TOP); + else sendTrigger(triggerType, BOT); + } + } + + @Override + public void startOfData() { + // Define the driver collection dependencies. + addDependency(inputCollectionNameEcal); + + addDependency(inputCollectionNameHodo); + + // Register the trigger. + ReadoutDataManager.registerTrigger(this); + + // Set the plot range based on the beam energy. + int bins = (int) Math.ceil((beamEnergy * 1.1) / BIN_SIZE); + double xMax = bins * BIN_SIZE; + + // Instantiate the trigger plots. + String[] postscripts = { " (No Cuts)", " (With Cuts)" }; + for(int i = NO_CUTS; i <= WITH_CUTS; i++) { + clusterSeedEnergy[i] = aida.histogram1D("Trigger Plots\\Cluster Seed Energy Distribution" + postscripts[i], bins, 0.0, xMax); + clusterHitCount[i] = aida.histogram1D("Trigger Plots\\Cluster Hit Count Distribution" + postscripts[i], 10, -0.5, 9.5); + clusterTotalEnergy[i] = aida.histogram1D("Trigger Plots\\Cluster Total Energy Distribution" + postscripts[i], bins, 0.0, xMax); + clusterDistribution[i] = aida.histogram2D("Trigger Plots\\Cluster Seed Distribution" + postscripts[i], 46, -23, 23, 11, -5.5, 5.5); + } + + // Run the superclass method. 
+ super.startOfData(); + } + + @Override + protected double getTimeDisplacement() { + return 0; + } + + @Override + protected double getTimeNeededForLocalOutput() { + return 0; + } + + /** + * Defines the name of the calorimeter geometry specification. By + * default, this is "Ecal". + * @param ecalName - The calorimeter name. + */ + public void setEcalGeometryName(String value) { + ecalGeometryName = value; + } + + /** + * Sets the name of the LCIO collection from which clusters are + * drawn. + * @param collection - The name of the LCIO collection. + */ + public void setInputCollectionNameEcal(String collection) { + inputCollectionNameEcal = collection; + } + + public void setInputCollectionNameHodo(String collection) { + inputCollectionNameHodo = collection; + } + + public void setTriggerType(String trigger) { + if(!trigger.equals(SINGLES0) && !trigger.equals(SINGLES1) && !trigger.equals(SINGLES2) && !trigger.equals(SINGLES3)) + throw new IllegalArgumentException("Error: wrong trigger type name \"" + trigger + "\"."); + triggerType = trigger; + } + + /** + * Sets the beam energy for the trigger. This is only used to + * determine the range of the x-axis for trigger plots. + * @param value - The beam energy of the input data, in units of + * GeV. + */ + public void setBeamEnergy(double value) { + beamEnergy = value; + } + + /** + * Sets the minimum hit count threshold for the trigger. This + * value is inclusive. + * @param hitCountThreshold - The value of the threshold. + */ + public void setHitCountThreshold(int hitCountThreshold) { + triggerModule.setCutValue(TriggerModule2019.CLUSTER_HIT_COUNT_LOW, hitCountThreshold); + } + + /** + * Sets the lower bound for the cluster energy threshold on the + * trigger. This value is inclusive. + * @param clusterEnergyLow - The value of the threshold. 
+ */ + public void setClusterEnergyLowThreshold(double clusterEnergyLow) { + triggerModule.setCutValue(TriggerModule2019.CLUSTER_TOTAL_ENERGY_LOW, clusterEnergyLow); + } + + /** + * Sets the upper bound for the cluster energy threshold on the + * trigger. This value is inclusive. + * @param clusterEnergyHigh - The value of the threshold. + */ + public void setClusterEnergyHighThreshold(double clusterEnergyHigh) { + triggerModule.setCutValue(TriggerModule2019.CLUSTER_TOTAL_ENERGY_HIGH, clusterEnergyHigh); + } + + + public void setClusterXMin(double xMin) { + triggerModule.setCutValue(TriggerModule2019.CLUSTER_XMIN, xMin); + } + + public void setClusterPDEC0(double pdeC0) { + triggerModule.setCutValue(TriggerModule2019.CLUSTER_PDE_C0, pdeC0); + } + + public void setClusterPDEC1(double pdeC1) { + triggerModule.setCutValue(TriggerModule2019.CLUSTER_PDE_C1, pdeC1); + } + + public void setClusterPDEC2(double pdeC2) { + triggerModule.setCutValue(TriggerModule2019.CLUSTER_PDE_C2, pdeC2); + } + + public void setClusterPDEC3(double pdeC3) { + triggerModule.setCutValue(TriggerModule2019.CLUSTER_PDE_C3, pdeC3); + } + +} diff --git a/digi/src/main/java/org/hps/digi/nospacing/SvtDigiWithPulserNoSpacingReadoutDriver.java b/digi/src/main/java/org/hps/digi/nospacing/SvtDigiWithPulserNoSpacingReadoutDriver.java new file mode 100755 index 000000000..f8673ec37 --- /dev/null +++ b/digi/src/main/java/org/hps/digi/nospacing/SvtDigiWithPulserNoSpacingReadoutDriver.java @@ -0,0 +1,867 @@ +package org.hps.digi.nospacing; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.PriorityQueue; +import java.util.Set; +import org.hps.conditions.database.DatabaseConditionsManager; +import org.hps.conditions.svt.SvtTimingConstants; +import org.hps.readout.svt.HPSSVTConstants; +import org.lcsim.detector.tracker.silicon.ChargeCarrier; +import org.lcsim.detector.tracker.silicon.HpsSiSensor; +import 
org.lcsim.detector.tracker.silicon.SiSensor; +import org.lcsim.geometry.Detector; +import org.lcsim.lcio.LCIOConstants; +import org.lcsim.event.EventHeader; +import org.lcsim.event.LCRelation; +import org.lcsim.event.MCParticle; +import org.lcsim.event.RawTrackerHit; +import org.lcsim.event.SimTrackerHit; +import org.lcsim.event.base.BaseLCRelation; +import org.lcsim.event.base.BaseRawTrackerHit; +import org.lcsim.recon.tracking.digitization.sisim.CDFSiSensorSim; +import org.lcsim.recon.tracking.digitization.sisim.SiElectrodeData; +import org.lcsim.recon.tracking.digitization.sisim.SiElectrodeDataCollection; +import org.lcsim.recon.tracking.digitization.sisim.SiSensorSim; +import org.lcsim.recon.tracking.digitization.sisim.config.SimTrackerHitReadoutDriver; +import org.hps.readout.ReadoutDataManager; +import org.hps.readout.ReadoutDriver; +import org.hps.readout.ReadoutTimestamp; +import org.hps.readout.util.collection.LCIOCollection; +import org.hps.readout.util.collection.LCIOCollectionFactory; +import org.hps.readout.util.collection.TriggeredLCIOData; +import org.hps.recon.tracking.PulseShape; +import org.hps.util.RandomGaussian; + +/** + * SVT readout simulation. 
+ * + * @author Sho Uemura + */ +public class SvtDigiWithPulserNoSpacingReadoutDriver extends ReadoutDriver { + //-----------------// + //--- Constants ---// + //-----------------// + private static final String SVT_SUBDETECTOR_NAME = "Tracker"; + private PulseShape shape = new PulseShape.FourPole(); + + private SimTrackerHitReadoutDriver readoutDriver = new SimTrackerHitReadoutDriver(); + private SiSensorSim siSimulation = new CDFSiSensorSim(); + private Map[]> hitMap = new HashMap[]>(); + private Map[]> pulserHitMap = new HashMap[]>(); + private List sensors = null; + + // readout period time offset in ns + private double readoutOffset = 0.0; + private double readoutLatency = 280.0; + // private double pileupCutoff = 300.0; + private double pileupCutoff = 0.0; + private String readout = "TrackerHits"; + private double timeOffset = 30.0; + private boolean noPileup = false; + private boolean addNoise = true; + + private boolean useTimingConditions = false; + + // cut settings + private boolean enableThresholdCut = true; + private int samplesAboveThreshold = 3; + private double noiseThreshold = 2.0; + private boolean enablePileupCut = true; + private boolean dropBadChannels = true; + private boolean debug_=false; + + // Collection Names + private String outputCollection = "SVTRawTrackerHits"; + private String relationCollection = "SVTTrueHitRelations"; + + private LCIOCollection trackerHitCollectionParams; + private LCIOCollection truthRelationsCollectionParams; + private LCIOCollection truthHitsCollectionParams; + /** + * The name of the input {@link org.lcsim.event.RawTrackerHit + * RawTrackerHit} collection from pulser data. + */ + private String pulserDataCollectionName = "SVTRawTrackerHits"; + + public SvtDigiWithPulserNoSpacingReadoutDriver() { + add(readoutDriver); + } + + /** + * Indicates whether or not noise should be simulated when analog + * hits are generated. 
+ * @param addNoise - true adds noise simulation to + * analog hits, while false uses only contributions + * from pulses generated from truth data. + */ + public void setAddNoise(boolean addNoise) { + this.addNoise = addNoise; + } + + /** + * Indicates whether hits consistent with pile-up effects should + * be dropped or not. A hit is considered to be consistent with + * pile-up effects if its earlier sample indices are larger than + * the later ones, suggesting that it includes the trailing end + * of another pulse from earlier in time. + * @param enablePileupCut - true enables the cut and + * drops pile-up hits, while false disables the cut + * and retains them. + */ + public void setEnablePileupCut(boolean enablePileupCut) { + this.enablePileupCut = enablePileupCut; + } + + /** + * Indicates whether noisy analog hits should be retained in + * readout. Hits are required to have a certain number of samples + * that exceeds a programmable noise threshold. The required + * number of samples may be set by the method {@link + * org.hps.readout.svt.SvtDigiWithPulserNoSpacingReadoutDriver#setSamplesAboveThreshold(int) + * setSamplesAboveThreshold(int)} and the noise threshold may be + * set with the method {@link + * org.hps.readout.svt.SvtDigiWithPulserNoSpacingReadoutDriver#setNoiseThreshold(double) + * setNoiseThreshold(double)}. + * @param enableThresholdCut - true enables the cut + * and drops noisy hits, while false disables the + * cut and retains them. + */ + public void setEnableThresholdCut(boolean enableThresholdCut) { + this.enableThresholdCut = enableThresholdCut; + } + + /** + * Sets the noise threshold used in conjunction with the sample + * threshold cut. The cut is enabled or disabled via the method + * {@link + * org.hps.readout.svt.SvtDigiWithPulserNoSpacingReadoutDriver#setEnableThresholdCut(boolean) + * setEnableThresholdCut(boolean)}. + * @param noiseThreshold - The noise threshold. 
+ */ + public void setNoiseThreshold(double noiseThreshold) { + this.noiseThreshold = noiseThreshold; + } + + /** + * Sets the number of smaples that must be above the noise + * threshold as employed by the sample threshold cut. The cut is + * enabled or disabled via the method {@link + * org.hps.readout.svt.SvtDigiWithPulserNoSpacingReadoutDriver#setEnableThresholdCut(boolean) + * setEnableThresholdCut(boolean)}. + * @param samplesAboveThreshold - The number of samples. Only six + * samples are used, so values above six will result in every hit + * being rejected. Values of zero or lower will result in the + * acceptance of every hit. Threshold cut is inclusive. + */ + public void setSamplesAboveThreshold(int samplesAboveThreshold) { + this.samplesAboveThreshold = samplesAboveThreshold; + } + + /** + * Indicates whether pile-up should be simulated. If set to + * false, analog hits are generated from the truth + * hits of a given event individually, with no contribution from + * neighboring events included. If set to true, data + * from multiple events is included. + * @param noPileup - true uses data from neighboring + * events when generating analog hits, while false + * uses only contributions from a single event. + */ + public void setNoPileup(boolean noPileup) { + this.noPileup = noPileup; + } + + /** + * Specifies whether analog hits which occur on "bad" channels + * should be included in readout data or not. + * @param dropBadChannels - true means that "bad" + * channel hits will be excluded from readout, while + * false means that they will be retained. + */ + public void setDropBadChannels(boolean dropBadChannels) { + this.dropBadChannels = dropBadChannels; + } + + /** + * Set the readout latency. This does not directly correspond to + * any internal function in the readout simulation, but affects + * what range of SVT ADC values are output around the trigger. It + * is retained to allow a matching to the hardware function. 
+ * @param readoutLatency - The readout latency to use. + */ + public void setReadoutLatency(double readoutLatency) { + this.readoutLatency = readoutLatency; + } + + /** + * Sets whether to use manually defined timing conditions, or if + * they should be loaded from the conditions database. + * @param useTimingConditions - true uses the values + * from the database, and false the manually defined + * values. + */ + public void setUseTimingConditions(boolean useTimingConditions) { + this.useTimingConditions = useTimingConditions; + } + + /** + * Sets the pulse shape to be used when emulating the analog hit + * response. Valid options are CRRC and + * FourPole. + * @param pulseShape - The pulse shape to be used. + */ + public void setPulseShape(String pulseShape) { + switch (pulseShape) { + case "CR-RC": + shape = new PulseShape.CRRC(); + break; + case "FourPole": + shape = new PulseShape.FourPole(); + break; + default: + throw new RuntimeException("Unrecognized pulseShape: " + pulseShape); + } + } + /** + * Sets the name of the input pulser data collection name. + * @param collection - The collection name. + */ + public void setPulserDataCollectionName(String collection) { + this.pulserDataCollectionName = collection; + } + + @Override + public void detectorChanged(Detector detector) { + // TODO: What does this "SimTrackerHitReadoutDriver" do? + String[] readouts = { readout }; + readoutDriver.setCollections(readouts); + + // Get the collection of all silicon sensors from the SVT. + sensors = detector.getSubdetector(SVT_SUBDETECTOR_NAME).getDetectorElement().findDescendants(HpsSiSensor.class); + + // If pile-up simulation is disabled, instantiate all + // possible processing queues. For the pile-up simulation, + // these are generated as needed. 
+ if(!noPileup) { + for(HpsSiSensor sensor : sensors) { + @SuppressWarnings("unchecked") + int nChans=640; + if(sensor.getNumberOfChannels()==510) + nChans=512; + //really dumb way to account for channels not read out + PriorityQueue[] hitQueues = new PriorityQueue[nChans]; + PriorityQueue[] pulserHitQueues = new PriorityQueue[nChans]; + hitMap.put(sensor, hitQueues); + pulserHitMap.put(sensor, pulserHitQueues); + } + } + + // Load timing conditions from the conditions database, if + // this is requested. + if(useTimingConditions) { + SvtTimingConstants timingConstants = DatabaseConditionsManager.getInstance().getCachedConditions(SvtTimingConstants.SvtTimingConstantsCollection.class, "svt_timing_constants").getCachedData().get(0); + readoutOffset = 4 * (timingConstants.getOffsetPhase() + 3); + // readoutLatency = 248.0 + timingConstants.getOffsetTime(); + readoutLatency = readoutLatency + timingConstants.getOffsetTime(); + System.out.println(this.getClass().getName()+":: readout offset = "+readoutOffset+" latency = "+readoutLatency); + } + } + + @Override + public void process(EventHeader event) { + super.process(event); + // get the pulser hits + Collection rawHits = ReadoutDataManager.getData(ReadoutDataManager.getCurrentTime(), ReadoutDataManager.getCurrentTime() + 2.0, pulserDataCollectionName, RawTrackerHit.class); + // Generate the truth hits. + List stripHits = doSiSimulation(); + List pulserStripHits=makePulserStripHits(rawHits); + if(debug_){ + System.out.println("In SvtDigi:: Current time is = "+ReadoutDataManager.getCurrentTime()); + System.out.println("Number of Sim StripHits for this bunch is "+stripHits.size()); + } + + if(!noPileup) { + // Process each of the pulser hits + for (StripHit pulserHit : pulserStripHits) { + // Get the sensor and channel for the pulser hit. + HpsSiSensor sensor = (HpsSiSensor) pulserHit.sensor; + int channel = pulserHit.channel; + // Queue the hit in the processing queue appropriate + // to its sensor and channel. 
+ PriorityQueue[] pulserHitQueues = pulserHitMap.get(sensor); + if(pulserHitQueues[channel] == null) { + pulserHitQueues[channel] = new PriorityQueue(); + } + pulserHitQueues[channel].add(pulserHit); + } + + // Process each of the truth hits + for (StripHit stripHit : stripHits) { + // Get the sensor and channel for the truth hit. + HpsSiSensor sensor = (HpsSiSensor)stripHit.sensor; + int channel = stripHit.channel; + // Queue the hit in the processing queue appropriate + // to its sensor and channel. + PriorityQueue[] hitQueues = hitMap.get(sensor); + if(hitQueues[channel] == null) { + hitQueues[channel] = new PriorityQueue(); + } + hitQueues[channel].add(stripHit); + } + + // Hits older than a certain time frame should no longer + // be used for pile-up simulation and should be removed + // from the processing queues. + for(SiSensor sensor : sensors) { + // Get the processing queue for the current sensor. + PriorityQueue[] pulserHitQueues = pulserHitMap.get(sensor); + // Check each hit to see if it is still in-time. + for(int i = 0; i < pulserHitQueues.length; i++) { + if(pulserHitQueues[i] != null) { + // Remove old hits. + while(!pulserHitQueues[i].isEmpty() && pulserHitQueues[i].peek().time < ReadoutDataManager.getCurrentTime() - (readoutLatency + pileupCutoff)) { + pulserHitQueues[i].poll(); + } + // If the queue is empty, remove it. + if(pulserHitQueues[i].isEmpty()) { pulserHitQueues[i] = null; } + } + } + + // Get the processing queue for the current sensor. + PriorityQueue[] hitQueues = hitMap.get(sensor); + // Check each hit to see if it is still in-time. + for(int i = 0; i < hitQueues.length; i++) { + if(hitQueues[i] != null) { + // Remove old hits. + while(!hitQueues[i].isEmpty() && hitQueues[i].peek().time < ReadoutDataManager.getCurrentTime() - (readoutLatency + pileupCutoff)) { + hitQueues[i].poll(); + } + // If the queue is empty, remove it. 
+ if(hitQueues[i].isEmpty()) { hitQueues[i] = null; } + } + } + } + } + // Otherwise, process the hits for a no pile-up simulation. + // When no pile-up is simulated, hits are fully processed and + // output on an event-by-event basis. + else { + // Create a list to hold the analog data. + List hits = new ArrayList(); + + // Process each of the truth hits. + for(StripHit stripHit : stripHits) { + // Get the hit parameters. + HpsSiSensor sensor = (HpsSiSensor) stripHit.sensor; + short[] samples = new short[6]; + + // Create a signal buffer and populate it with the + // appropriate pedestal values. + double[] signal = new double[6]; + for(int sampleN = 0; sampleN < 6; sampleN++) { + signal[sampleN] = sensor.getPedestal(stripHit.channel, sampleN); + } + + // If noise should be added, do so. + if(addNoise) { + addNoise(sensor, stripHit.channel, signal); + } + + // Emulate the pulse response and add it to the + // sample array. + for(int sampleN = 0; sampleN < 6; sampleN++) { + double time = sampleN * HPSSVTConstants.SAMPLING_INTERVAL - timeOffset; + shape.setParameters(stripHit.channel, (HpsSiSensor) sensor); + signal[sampleN] += stripHit.amplitude * shape.getAmplitudePeakNorm(time); + samples[sampleN] = (short) Math.round(signal[sampleN]); + } + + // Create raw tracker hits from the sample data. + long channel_id = sensor.makeChannelID(stripHit.channel); + RawTrackerHit hit = new BaseRawTrackerHit(0, channel_id, samples, new ArrayList(stripHit.simHits), sensor); + + // If the analog hit passes the readout cuts, it may + // be added to the data stream. + if(readoutCuts(hit)) { hits.add(hit); } + } + + // Output the processed hits to the LCIO stream. + ReadoutDataManager.addData(outputCollection, hits, RawTrackerHit.class); + } + } + + @Override + public void startOfData() { + // The output collection is only handled by the readout data + // manager if no pile-up simulation is included. Otherwise, + // the driver outputs its own collection at readout. 
+ if(noPileup) { + LCIOCollectionFactory.setCollectionName(outputCollection); + LCIOCollectionFactory.setProductionDriver(this); + LCIOCollectionFactory.setFlags(1 << LCIOConstants.TRAWBIT_ID1); + LCIOCollectionFactory.setReadoutName(readout); + LCIOCollection noPileUpCollectionParams = LCIOCollectionFactory.produceLCIOCollection(RawTrackerHit.class); + ReadoutDataManager.registerCollection(noPileUpCollectionParams, true, 8.0, 32.0); + } + addDependency(pulserDataCollectionName); + // Define the LCSim on-trigger collection parameters. + LCIOCollectionFactory.setCollectionName(outputCollection); + LCIOCollectionFactory.setProductionDriver(this); + LCIOCollectionFactory.setFlags(1 << LCIOConstants.TRAWBIT_ID1); + LCIOCollectionFactory.setReadoutName(readout); + trackerHitCollectionParams = LCIOCollectionFactory.produceLCIOCollection(RawTrackerHit.class); + + LCIOCollectionFactory.setCollectionName(relationCollection); + LCIOCollectionFactory.setProductionDriver(this); + truthRelationsCollectionParams = LCIOCollectionFactory.produceLCIOCollection(LCRelation.class); + + LCIOCollectionFactory.setCollectionName("TrackerHits"); + LCIOCollectionFactory.setFlags(0xc0000000); + LCIOCollectionFactory.setProductionDriver(this); + LCIOCollectionFactory.setReadoutName("TrackerHits"); + truthHitsCollectionParams = LCIOCollectionFactory.produceLCIOCollection(SimTrackerHit.class); + + // Run the superclass method. + super.startOfData(); + } + + /** + * Performs a simulation of silicon sensor response and generates + * a collection of {@link + * org.hps.readout.svt.SvtDigiWithPulserNoSpacingReadoutDriver.StripHit StripHit} + * objects representing the detector response. + * @return Returns a collection of StripHit objects describing + * the detector response for the current event. + */ + private List doSiSimulation() { + // Create a list to store the simulated hit objects. + List stripHits = new ArrayList(); + + // Process each of the SVT sensors. 
+ for(SiSensor sensor : sensors) { + // Set the sensor to be used in the charge deposition + // simulation. + siSimulation.setSensor(sensor); + // Perform the charge deposition simulation. + Map electrodeDataMap = siSimulation.computeElectrodeData(); + + // Iterate over all possible charge carriers. + for(ChargeCarrier carrier : ChargeCarrier.values()) { + // If the sensor is capable of collecting the given + // charge carrier, then obtain the electrode data for + // the sensor. + if(sensor.hasElectrodesOnSide(carrier)) { + // Attempt to obtain electrode data. + SiElectrodeDataCollection electrodeDataCol = electrodeDataMap.get(carrier); + + // If there is no electrode data available create + // a new instance of electrode data. + if(electrodeDataCol == null) { + electrodeDataCol = new SiElectrodeDataCollection(); + } + + // Loop over all sensor channels. + for(Integer channel : electrodeDataCol.keySet()) { + // Get the electrode data for this channel. + SiElectrodeData electrodeData = electrodeDataCol.get(channel); + Set simHits = electrodeData.getSimulatedHits(); + + // Compute hit time as the unweighted average + // of SimTrackerHit times; this is dumb but + // okay since there's generally only one + // SimTrackerHit. + double time = 0.0; + for(SimTrackerHit hit : simHits) { + time += hit.getTime(); + } + time /= simHits.size(); + time += ReadoutDataManager.getCurrentTime(); + + // Get the charge in units of electrons. + double charge = electrodeData.getCharge(); + + // Calculate the amplitude. + double resistorValue = 100; // Ohms + double inputStageGain = 1.5; + // FIXME: This should use the gains instead + double amplitude = (charge / HPSSVTConstants.MIP) * resistorValue * inputStageGain * Math.pow(2, 14) / 2000; + + // Generate a StripHit object containing the + // simulation data and add it to the list. 
+ stripHits.add(new StripHit(sensor, channel, amplitude, time, simHits)); + } + } + } + + // Clear the sensors of all deposited charge + siSimulation.clearReadout(); + } + + // Return the collection of StripHit objects. + return stripHits; + } + + private List makePulserStripHits(Collection rawHits) { + // Create a list to store the simulated hit objects. + List stripHits = new ArrayList(); + for (RawTrackerHit hit: rawHits){ + SiSensor sensor=(SiSensor) hit.getDetectorElement(); + int strip = hit.getIdentifierFieldValue("strip"); + double time=ReadoutDataManager.getCurrentTime(); + stripHits.add(new StripHit(sensor, strip, time, hit)); + } + return stripHits; + } + /** + * Adds a random Gaussian noise signature to the specified signal + * buffer based on the sensor and channel parameters. + * @param sensor - The sensor on which the signal buffer occurs. + * @param channel - The channel on which the signal buffer + * occurs. + * @param signal - The signal buffer. This must be an array of + * size six. + */ + private void addNoise(SiSensor sensor, int channel, double[] signal) { + for(int sampleN = 0; sampleN < 6; sampleN++) { + signal[sampleN] += RandomGaussian.getGaussian(0, ((HpsSiSensor) sensor).getNoise(channel, sampleN)); + } + } + + /** + * Performs each of the three readout cuts, if they are enabled. + * This is the equivalent of calling, as appropriate, the methods + * {@link + * org.hps.readout.svt.SvtDigiWithPulserNoSpacingReadoutDriver#samplesAboveThreshold(RawTrackerHit) + * samplesAboveThreshold(RawTrackerHit)}, {@link + * org.hps.readout.svt.SvtDigiWithPulserNoSpacingReadoutDriver#pileupCut(RawTrackerHit) + * pileupCut(RawTrackerHit)}, and {@link + * org.hps.readout.svt.SvtDigiWithPulserNoSpacingReadoutDriver#badChannelCut(RawTrackerHit) + * badChannelCut(RawTrackerHit)}. + * @param hit - The analog hit to test. + * @return Returns true if all enabled cuts are + * passed, and false otherwise. 
+     */
+    private boolean readoutCuts(RawTrackerHit hit) {
+        // Perform each enabled cut.
+        if(enableThresholdCut && !samplesAboveThreshold(hit)) {
+            return false;
+        }
+        if(enablePileupCut && !pileupCut(hit)) {
+            return false;
+        }
+        if(dropBadChannels && !badChannelCut(hit)) {
+            return false;
+        }
+        
+        // If all enabled cuts are passed, return true.
+        return true;
+    }
+    
+    /**
+     * Checks whether an analog hit occurred on a "bad" channel.
+     * @param hit - The hit to be checked.
+     * @return Returns <code>true</code> if the hit did not
+     * occur on a bad channel, and <code>false</code> if it did.
+     */
+    private boolean badChannelCut(RawTrackerHit hit) {
+        HpsSiSensor sensor = (HpsSiSensor) hit.getDetectorElement();
+        int channel = hit.getIdentifierFieldValue("strip");
+        return !sensor.isBadChannel(channel);
+    }
+    
+    /**
+     * Attempts to eliminate samples where the pulse starts before
+     * the sample array. This is done by requiring that the ADC
+     * values rise from the second to the third sample, <i>or</i>
+     * from the third to the fourth sample (i.e. the pulse is still
+     * rising early in the readout window). Note that only one of
+     * the two rises is required, not both.
+     * @param hit - The hit to check.
+     * @return Returns <code>true</code> if the no pile-up condition
+     * is met and <code>false</code> if it is not.
+     */
+    private boolean pileupCut(RawTrackerHit hit) {
+        short[] samples = hit.getADCValues();
+        return (samples[2] > samples[1] || samples[3] > samples[2]);
+    }
+    
+    /**
+     * Attempts to eliminate false hits generated due to noise by
+     * requiring that a programmable number of samples exceed a
+     * similarly programmable noise threshold.
+     * @param hit - The hit to be checked.
+     * @return Returns <code>true</code> if the noise threshold count
+     * cut is met and <code>false</code> if it is not.
+     */
+    private boolean samplesAboveThreshold(RawTrackerHit hit) {
+        // Get the channel and sensor information for the hit.
+        int channel = hit.getIdentifierFieldValue("strip");
+        HpsSiSensor sensor = (HpsSiSensor) hit.getDetectorElement();
+        
+        // Track the noise and pedestal for each sample.
+        double noise;
+        double pedestal;
+        
+        // Iterate over the samples and count how many are above the
+        // noise threshold.
+        int count = 0;
+        short[] samples = hit.getADCValues();
+        for(int sampleN = 0; sampleN < samples.length; sampleN++) {
+            pedestal = sensor.getPedestal(channel, sampleN);
+            noise = sensor.getNoise(channel, sampleN);
+            if(samples[sampleN] - pedestal > noise * noiseThreshold) {
+                count++;
+            }
+        }
+        
+        // The cut is passed if enough samples are above the noise
+        // threshold to pass the minimum count threshold. (The field
+        // samplesAboveThreshold intentionally shares the method name.)
+        return count >= samplesAboveThreshold;
+    }
+    
+    @Override
+    protected Collection<TriggeredLCIOData<?>> getOnTriggerData(double triggerTime) {
+        // No pile-up events are output on an event-by-event basis,
+        // and as such, do not output anything at this stage.
+        if(noPileup) { return null; }
+        // Create a list to hold the analog data
+        List<RawTrackerHit> hits = new ArrayList<RawTrackerHit>();
+        List<SimTrackerHit> truthHits = new ArrayList<SimTrackerHit>();
+        List<LCRelation> trueHitRelations = new ArrayList<LCRelation>();
+        // Calculate time of first sample
+        // double firstSample = Math.floor(((triggerTime + 256) - readoutLatency - readoutOffset) / HPSSVTConstants.SAMPLING_INTERVAL)
+        //         * HPSSVTConstants.SAMPLING_INTERVAL + readoutOffset;
+        
+        double firstSample = Math.floor(((triggerTime + 0) - readoutLatency - readoutOffset) / HPSSVTConstants.SAMPLING_INTERVAL)
+                * HPSSVTConstants.SAMPLING_INTERVAL + readoutOffset;
+        
+        if(debug_){
+            System.out.println(this.getClass().getName()+":: trigger time = "+triggerTime);
+            System.out.println(this.getClass().getName()+":: svt first sample time for trigger = "+firstSample);
+        }
+        List<StripHit> processedHits = new ArrayList<StripHit>();
+        
+        for(SiSensor sensor : sensors) {
+            // Get the hit queues for the current sensor.
+            PriorityQueue<StripHit>[] hitQueues = hitMap.get(sensor);
+            PriorityQueue<StripHit>[] pulserHitQueues = pulserHitMap.get(sensor);
+            
+            // Iterate over the hit queue channels.
+            for(int channel = 0; channel < hitQueues.length; channel++) {
+                // Unless noise should be added, there is nothing to
+                // process on an empty hit queue. Skip it.
+                if(!addNoise && (hitQueues[channel] == null || hitQueues[channel].isEmpty()) && (pulserHitQueues[channel] == null || pulserHitQueues[channel].isEmpty())){
+                    continue;
+                }
+                
+                // Create a buffer to hold the extracted response for
+                // the channel.
+                double[] signal = new double[6];
+                
+                //do the pulser hit first...if there is a pulser hit, don't add pedestal or noise to mc hit
+                boolean hasPulserHit=false; // flag if this channel has a pulser hit
+                // FIX: also require the queue to be non-empty; poll() on an
+                // empty queue returns null, which would cause an NPE below.
+                // (This branch is reachable with an empty pulser queue when
+                // addNoise is enabled or when MC hits exist on the channel.)
+                if(pulserHitQueues[channel] != null && !pulserHitQueues[channel].isEmpty()){
+                    StripHit ph=pulserHitQueues[channel].poll();
+                    RawTrackerHit rth=ph.getRawTrackerHit();
+                    hasPulserHit=true;
+                    short[] samples =rth.getADCValues();
+                    for(int sampleN = 0; sampleN < 6; sampleN++) {
+                        signal[sampleN] = samples[sampleN];
+                    }
+                }
+                
+                if(!hasPulserHit){
+                    // Create a buffer to hold the extracted signal for
+                    // the channel. Populate it with the appropriate
+                    // pedestal values.
+                    
+                    for(int sampleN = 0; sampleN < 6; sampleN++) {
+                        signal[sampleN] = ((HpsSiSensor) sensor).getPedestal(channel, sampleN);
+                    }
+                    
+                    // If noise should be added, do so.
+                    if(addNoise) {
+                        addNoise(sensor, channel, signal);
+                    }
+                }
+                
+                // Create a list to store truth SVT hits.
+                List<SimTrackerHit> simHits = new ArrayList<SimTrackerHit>();
+                
+                // If there is data in the mc hit queues, process it.
+                if(hitQueues[channel] != null) {
+                    if(debug_)System.out.println(this.getClass().getName()+":: data in channel = "+channel);
+                    for(StripHit hit : hitQueues[channel]) {
+                        processedHits.add(hit);
+                        
+                        // Track the noise and contribution to the
+                        // signal from the current hit.
+                        double meanNoise = 0;
+                        double totalContrib = 0;
+                        
+                        // Emulate the pulse response for the hit
+                        // across all six samples.
+ StringBuffer signalBuffer = new StringBuffer("\t\t\t\tSample Pulse :: ["); + for(int sampleN = 0; sampleN < 6; sampleN++) { + double sampleTime = firstSample + sampleN * HPSSVTConstants.SAMPLING_INTERVAL; + shape.setParameters(channel, (HpsSiSensor) sensor); + double signalAtTime = hit.amplitude * shape.getAmplitudePeakNorm(sampleTime - hit.time); + if(debug_){ + System.out.println(this.getClass().getName()+":: making pulse: sample time = " + +sampleTime+"; hit time = "+hit.time); + System.out.println(this.getClass().getName()+":: signal @ time() = "+signalAtTime); + } + totalContrib += signalAtTime; + signal[sampleN] += signalAtTime; + meanNoise += ((HpsSiSensor) sensor).getNoise(channel, sampleN); + + signalBuffer.append(signalAtTime + " (" + sampleTime + ")"); + if(sampleN != 5) { + signalBuffer.append(" "); + } + } + signalBuffer.append("]"); + + // TODO: Move this to the noise comparison below. + meanNoise /= 6; + + // Calculate the average noise across all + // samples and compare it to the contribution + // from the hit. If it exceeds a the noise + // threshold, store it as a truth hit. + //meanNoise /= 6; + if(totalContrib > 4.0 * meanNoise) { + simHits.addAll(hit.simHits); + } + } + } + + // Convert the samples into a short array, + short[] samples = new short[6]; + for(int sampleN = 0; sampleN < 6; sampleN++) { + samples[sampleN] = (short) Math.round(signal[sampleN]); + } + + // Get the proper channel ID. + long channel_id = ((HpsSiSensor) sensor).makeChannelID(channel); + + // Create a new tracker hit. + RawTrackerHit hit = new BaseRawTrackerHit(0, channel_id, samples, simHits, sensor); + // Only tracker hits that pass the readout cuts may + // be passed through to readout. + if(readoutCuts(hit)) { + // Add the hit to the readout hits collection. + hits.add(hit); + // Associate the truth hits with the raw hit and + // add them to the truth hits collection. 
+ for(SimTrackerHit simHit : hit.getSimTrackerHits()) { + LCRelation hitRelation = new BaseLCRelation(hit, simHit); + trueHitRelations.add(hitRelation); + truthHits.add(simHit); + } + } + } + } + + // Create the collection data objects for output to the + // readout event. + TriggeredLCIOData hitCollection = new TriggeredLCIOData(trackerHitCollectionParams); + hitCollection.getData().addAll(hits); + TriggeredLCIOData truthHitCollection = new TriggeredLCIOData(truthHitsCollectionParams); + truthHitCollection.getData().addAll(truthHits); + TriggeredLCIOData truthRelationCollection = new TriggeredLCIOData(truthRelationsCollectionParams); + truthRelationCollection.getData().addAll(trueHitRelations); + + // MC particles need to be extracted from the truth hits + // and included in the readout data to ensure that the + // full truth chain is available. + Set truthParticles = new java.util.HashSet(); + for(SimTrackerHit simHit : truthHits) { + ReadoutDataManager.addParticleParents(simHit.getMCParticle(), truthParticles); + } + + // Create the truth MC particle collection. + LCIOCollectionFactory.setCollectionName("MCParticle"); + LCIOCollectionFactory.setProductionDriver(this); + LCIOCollection truthParticleCollection = LCIOCollectionFactory.produceLCIOCollection(MCParticle.class); + TriggeredLCIOData truthParticleData = new TriggeredLCIOData(truthParticleCollection); + truthParticleData.getData().addAll(truthParticles); + + // A trigger timestamp needs to be produced as well. + ReadoutTimestamp timestamp = new ReadoutTimestamp(ReadoutTimestamp.SYSTEM_TRACKER, firstSample); + LCIOCollectionFactory.setCollectionName(ReadoutTimestamp.collectionName); + LCIOCollection timestampCollection = LCIOCollectionFactory.produceLCIOCollection(ReadoutTimestamp.class); + TriggeredLCIOData timestampData = new TriggeredLCIOData(timestampCollection); + timestampData.getData().add(timestamp); + + // Store them in a single collection. 
+ Collection> eventOutput = new ArrayList>(5); + eventOutput.add(hitCollection); + eventOutput.add(truthParticleData); + eventOutput.add(truthHitCollection); + eventOutput.add(truthRelationCollection); + eventOutput.add(timestampData); + + // Return the event output. + return eventOutput; + } + + /** + * Class StripHit is responsible for storing several + * parameters defining a simulated hit object. + */ + private class StripHit implements Comparable { + SiSensor sensor; + int channel; + double amplitude; + double time; + Set simHits; + RawTrackerHit pulserHit; + boolean isPulser=false; + + public StripHit(SiSensor sensor, int channel, double amplitude, double time, Set simHits) { + this.sensor = sensor; + this.channel = channel; + this.amplitude = amplitude; + this.time = time; + this.simHits = simHits; + this.isPulser=false; + } + + public StripHit(SiSensor sensor, int channel, double time, RawTrackerHit pulserHit){ + this.sensor = sensor; + this.channel = channel; + this.pulserHit=pulserHit; + this.time=time; + this.isPulser=false; + } + + public boolean getIsPulser(){return this.isPulser;} + public RawTrackerHit getRawTrackerHit(){return this.pulserHit;} + @Override + public int compareTo(Object o) { + double deltaT = time - ((StripHit) o).time; + if(deltaT > 0) { + return 1; + } else if(deltaT < 0) { + return -1; + } else { + return 0; + } + } + } + + @Override + protected double getTimeDisplacement() { + return 0; + } + + @Override + protected double getTimeNeededForLocalOutput() { + // TODO: Probably should have some defined value - buffer seems to be filled enough from the ecal delay alone, though. 
+ return 100; + } + +} diff --git a/steering-files/src/main/resources/org/hps/steering/readout/TestNoSpacing.lcsim b/steering-files/src/main/resources/org/hps/steering/readout/TestNoSpacing.lcsim new file mode 100644 index 000000000..790f8d19b --- /dev/null +++ b/steering-files/src/main/resources/org/hps/steering/readout/TestNoSpacing.lcsim @@ -0,0 +1,447 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + true + + + + + EcalHits + + + 8.0 + 32.0 + false + + + + MCParticle + + + 32.0 + 32.0 + false + + + + HodoscopeHits + + + 8.0 + 32.0 + false + + + + TrackerHits + + + 8.0 + 32.0 + false + + + + + EcalReadoutHits + + + 32.0 + 32.0 + false + + + + HodoReadoutHits + + + 32.0 + 32.0 + false + + + + + SVTRawTrackerHits + + + + + + SVTRawTrackerHits + + 32.0 + 32.0 + false + + + + + + EcalHits + PulserDataEcalReadoutHits + EcalRawHits + EcalReadoutHits + EcalTruthRelations + TriggerPathTruthRelations + + true + + + 1 + true + + + 48 + + + true + + + false + + + + + + EcalRawHits + EcalCorrectedHits + + true + + + false + + + + + + true + + + true + + + + + HodoscopeHits + HodoscopePreprocessedHits + + true + + + + + + + HodoscopePreprocessedHits + PulserDataHodoReadoutHits + HodoscopeRawHits + HodoscopeReadoutHits + HodoscopeTruthRelations + HodoscopeTriggerPathTruthRelations + + true + + + 1 + false + + + + 32 + + 0.000833333 + + + false + + + false + + + + + + HodoscopeRawHits + HodoscopeCorrectedHits + + true + + + 1 + + false + + + + + + HodoscopeCorrectedHits + HodoscopePatterns + + true + + false + + + + + EcalClustersGTP + + HodoscopePatterns + + true + + + 15 + + + + + EcalClustersGTP + + HodoscopePatterns + + singles2 + + true + + + 15 + + + + + + PulserDataSVTRawTrackerHits + false + true + 0.0 + true + + + + + 200 + ${outputFile}.slcio + + + + + + + + diff --git a/tracking/src/main/java/org/hps/recon/tracking/kalman/KFOutputDriver.java b/tracking/src/main/java/org/hps/recon/tracking/kalman/KFOutputDriver.java 
new file mode 100644 index 000000000..81701c4a9 --- /dev/null +++ b/tracking/src/main/java/org/hps/recon/tracking/kalman/KFOutputDriver.java @@ -0,0 +1,1296 @@ +package org.hps.recon.tracking.kalman; + +import hep.physics.vec.BasicHep3Vector; +import hep.physics.vec.Hep3Vector; +import hep.physics.vec.VecOp; +import org.hps.util.Pair; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.logging.Level; +import java.util.logging.Logger; + +import org.hps.recon.tracking.FittedRawTrackerHit; +import org.hps.recon.tracking.ShapeFitParameters; +import org.hps.recon.tracking.CoordinateTransformations; +import org.hps.recon.tracking.TrackStateUtils; +import org.hps.recon.tracking.TrackUtils; +import org.lcsim.detector.ITransform3D; +import org.lcsim.detector.tracker.silicon.ChargeCarrier; +import org.lcsim.detector.tracker.silicon.HpsSiSensor; +import org.lcsim.detector.tracker.silicon.SiSensor; +import org.lcsim.event.EventHeader; +import org.lcsim.event.GenericObject; +import org.lcsim.event.LCRelation; +import org.lcsim.event.RawTrackerHit; +import org.lcsim.event.RelationalTable; +import org.lcsim.event.Track; +import org.lcsim.event.base.BaseTrack; +import org.lcsim.event.TrackState; +import org.lcsim.event.base.BaseTrackState; +import org.lcsim.event.TrackerHit; +import org.lcsim.event.base.BaseRelationalTable; +import org.lcsim.fit.helicaltrack.HelicalTrackCross; +import org.lcsim.geometry.Detector; +import org.lcsim.util.Driver; +import org.lcsim.util.aida.AIDA; +import hep.aida.IManagedObject; +import hep.aida.IBaseHistogram; + +import org.lcsim.fit.helicaltrack.HelixUtils; +import org.lcsim.geometry.FieldMap; + + +// E/p plots +import org.lcsim.event.Cluster; +import org.lcsim.event.ReconstructedParticle; +import org.lcsim.event.TrackState; +//Fiducial cuts on the calorimeter cluster +import org.hps.record.triggerbank.TriggerModule; + +/** + * Make post-KF plots + 
*/ +public class KFOutputDriver extends Driver { + + private AIDA aidaKF; // era public + private String outputPlots = "KalmanTrackingPlots.root"; + private String trackCollectionName = "KalmanTracks"; + private String inputCollectionName = "FinalStateParticles_KF"; + private String trackResidualsRelColName = "KFUnbiasResRelations"; + private Map fittedRawTrackerHitMap = new HashMap(); + private String fittedHitsCollectionName = "SVTFittedRawTrackerHits"; + // private String dataRelationCollection = KFKinkData.DATA_RELATION_COLLECTION; + List _fittedHits; + private List sensors = new ArrayList(); + private double bfield; + public boolean debug = false; + private double chi2Cut = 99999; + private double timeOffset=-40.0; //-40ns for MC ,-55 for data (2016 numbers) + //String kinkFolder = "/kf_kinks/"; + String epullFolder = "/err_pulls/"; + String trkpFolder = "/trk_params/"; + String trkpDetailFolder="/trk_detail/"; + String resFolder="/res/"; + String hitFolder="/hit/"; + String eopFolder = "/EoP/"; + // private boolean b_doKFkinks = false; + private boolean b_doKFresiduals = true; + private boolean b_doDetailPlots = false; + private boolean b_doRawHitPlots = true; + //The field map for extrapolation + private FieldMap bFieldMap; + + //The location of the extrapolation + private double bsZ = 0.; + + //Spacing between top and bottom in the 2D histos + private int mod = 5; + + private double minMom = 1.; + private double maxMom = 6.; + + private double minPhi = -999.9; + private double maxPhi = 999.9; + + private double minTanL = 0.015; + private double maxTanL = 999.9; + + private int nHits = 10; + + private boolean useParticles = true; + + private Pair _trkTimeSigma; + + public void setDebug(boolean val) { + debug = val; + } + + public void setTimeOffset(double val){ + timeOffset=val; + } + + public void setUseParticles(boolean val) { + useParticles = val; + } + /* + public void setDataRelationCollection (String val) { + dataRelationCollection = val; + } + */ + 
public void setNHits (int val ) { + nHits = val; + } + + public void setMinMom (double val) { + minMom = val; + } + + public void setMaxMom (double val) { + maxMom = val; + } + + public void setMinPhi (double val) { + minPhi = val; + } + + public void setMaxPhi (double val) { + maxPhi = val; + } + + public void setMinTanL (double val) { + minTanL = val; + } + + public void setMaxTanL (double val) { + maxTanL = val; + } + + + //Override the Z of the target. + public void setBsZ (double input) { + bsZ = input; + } + + public void setDoKFresiduals (boolean input) { + b_doKFresiduals = input; + } + + // public void setDoKFkinks (boolean input) { + // b_doKFkinks = input; + // } + + public void setTrackResidualsRelColName (String val) { + trackResidualsRelColName = val; + } + + public void setChi2Cut(double input) { + chi2Cut = input; + } + + public void setOutputPlotsFilename(String fname) { + outputPlots = fname; + } + + public void setTrackCollectionName(String val) { + trackCollectionName=val; + } + + public void setInputCollectionName(String val) { + inputCollectionName=val; + } + + + @Override + protected void detectorChanged(Detector detector) { + if (aidaKF == null) + aidaKF = AIDA.defaultInstance(); + + aidaKF.tree().cd("/"); + + for (HpsSiSensor s : detector.getDetectorElement().findDescendants(HpsSiSensor.class)) { + if (s.getName().startsWith("module_") && s.getName().endsWith("sensor0")) { + sensors.add(s); + } + } + + + Hep3Vector fieldInTracker = TrackUtils.getBField(detector); + this.bfield = Math.abs(fieldInTracker.y()); + + bFieldMap = detector.getFieldMap(); + + if (trackCollectionName.contains("Kalman") || trackCollectionName.contains("KF")) { + + // kinkFolder = "/kf_kinks/"; + epullFolder = "/kf_err_pulls/"; + trkpFolder = "/kf_trk_params/"; + trkpDetailFolder = "/kf_trk_detail/"; + resFolder = "/kf_res/"; + hitFolder = "/kf_hit/"; + } + + + + setupPlots(); + setupEoPPlots(); + } + + @Override + public void process(EventHeader event) { + + + + // 
Track Collection + List tracks = new ArrayList(); + + // Particle Collection + List particles = null; + + // Create a mapping of matched Tracks to corresponding Clusters. + HashMap TrackClusterPairs = new HashMap(); + if(b_doRawHitPlots){ + // Get the list of fitted hits from the event + _fittedHits = event.get(LCRelation.class, fittedHitsCollectionName); + } + int TrackType = 0; + if (!useParticles) { + if (debug) + System.out.println("PF:: DEBUG :: NOT Using particles" + trackCollectionName); + if (trackCollectionName.contains("Kalman") || trackCollectionName.contains("KF")) { + TrackType = 1; + } + } + else { + if (debug) + System.out.println("PF:: DEBUG :: Using particles" + inputCollectionName); + if (inputCollectionName.contains("Kalman") || inputCollectionName.contains("KF")) { + + TrackType = 1 ; + } + + } + if (debug) + System.out.println("PF:: DEBUG :: Track Type=" + TrackType); + + + if (!useParticles) + tracks = event.get(Track.class,trackCollectionName); + else { + particles = event.get(ReconstructedParticle.class, inputCollectionName); + for (ReconstructedParticle particle : particles) { + //this requires track cluster match + if (particle.getTracks().isEmpty() || particle.getClusters().isEmpty()) + continue; + Track track = particle.getTracks().get(0); + Cluster cluster = particle.getClusters().get(0); + tracks.add(track); + TrackClusterPairs.put(track,cluster); + } + } + + int nTracks=tracks.size(); + if(debug) + System.out.println(this.getClass()+":: found "+nTracks + " tracks"); + aidaKF.histogram1D(trkpFolder+"nTracks").fill(nTracks); + RelationalTable hitToStrips = TrackUtils.getHitToStripsTable(event); + RelationalTable hitToRotated = TrackUtils.getHitToRotatedTable(event); + + for (Track trk : tracks) { + + if (trk.getChi2() > chi2Cut) + continue; + + if (trk.getTrackerHits().size() < nHits) + continue; + + + if(debug) + System.out.println("Track passed hits d0 = "+trk.getTrackStates().get(0).getD0()); + + Hep3Vector momentum = new 
BasicHep3Vector(trk.getTrackStates().get(0).getMomentum()); + if (momentum.magnitude() < minMom) + continue; + + if (momentum.magnitude() > maxMom) + continue; + + if(debug) + System.out.println("Track passed momentum"); + + TrackState trackState = trk.getTrackStates().get(0); + if (Math.abs(trackState.getTanLambda()) < minTanL) + continue; + + if (Math.abs(trackState.getTanLambda()) > maxTanL) + continue; + + if (Math.abs(trackState.getPhi()) < minPhi) + continue; + + if (Math.abs(trackState.getPhi()) > maxPhi) + continue; + + if(debug) + System.out.println("Track passed tanLambda"); + + + Map sensorHits = new HashMap(); + + for (TrackerHit hit : trk.getTrackerHits()) { + HpsSiSensor sensor = ((HpsSiSensor) ((RawTrackerHit) hit.getRawHits().get(0)).getDetectorElement()); + if (sensor != null) { + sensorHits.put(sensor, hit); + } + + if (debug && sensor == null) + System.out.printf("TrackerHit null sensor %s \n", hit.toString()); + } + _trkTimeSigma=getTrackTime(sensorHits); + doBasicKFtrack(trk,sensorHits); + if (b_doKFresiduals) + doKFresiduals(trk, sensorHits,event); + + // if (b_doGBLkinks) + // doGBLkinks(trk,gblKink, sensorNums); + + if (useParticles) + doEoPPlots(trk,TrackClusterPairs.get(trk)); + + + } + } + + private void doEoPPlots(Track track, Cluster cluster) { + + double energy = cluster.getEnergy(); + double[] trk_prms = track.getTrackParameters(); + double tanL = trk_prms[BaseTrack.TANLAMBDA]; + double phi = trk_prms[BaseTrack.PHI]; + TrackState trackState = track.getTrackStates().get(0); + double trackp = new BasicHep3Vector(trackState.getMomentum()).magnitude(); + double eop = energy / trackp; + + String vol = tanL > 0 ? "top" : "bottom"; + + //Charge sign is flipped + String charge = track.getCharge() > 0 ? 
"ele" : "pos"; + + + aidaKF.histogram1D(eopFolder+"Ecluster_"+vol).fill(energy); + aidaKF.histogram1D(eopFolder+"EoP_"+vol).fill(eop); + aidaKF.histogram2D(eopFolder+"EoP_vs_phi_"+vol).fill(phi,eop); + aidaKF.histogram2D(eopFolder+"EoP_vs_trackP_"+vol).fill(trackp,eop); + aidaKF.histogram2D(eopFolder+"EoP_vs_tanLambda_"+vol).fill(tanL,eop); + Double trackTime = _trkTimeSigma.getFirstElement(); + double trkCluTime=trackTime-cluster.getCalorimeterHits().get(0).getTime()-timeOffset; + aidaKF.histogram1D(trkpFolder+"trk-cluTime_"+charge+"_"+vol).fill(trkCluTime); + aidaKF.histogram1D(trkpFolder+"trk-cluTime_"+vol).fill(trkCluTime); + + + aidaKF.histogram2D(eopFolder+"EoP_vs_trackP_"+charge+"_"+vol).fill(trackp,eop); + aidaKF.histogram2D(eopFolder+"EoP_vs_tanLambda_"+charge+"_"+vol).fill(tanL,eop); + aidaKF.histogram2D(eopFolder+"EoP_vs_phi_"+charge+"_"+vol).fill(phi,eop); + + aidaKF.histogram2D(eopFolder+"EoP_vs_tanLambda").fill(tanL,eop); + aidaKF.histogram2D(eopFolder+"EoP_vs_phi").fill(phi,eop); + aidaKF.histogram3D(eopFolder+"EoP_vs_tanLambda_phi").fill(tanL, + phi, + eop); + + + if (TriggerModule.inFiducialRegion(cluster)) { + + aidaKF.histogram1D(eopFolder+"Ecluster_"+vol+"_fid").fill(energy); + aidaKF.histogram1D(eopFolder+"EoP_"+vol+"_fid").fill(eop); + aidaKF.histogram2D(eopFolder+"EoP_vs_phi_"+vol+"_fid").fill(phi,eop); + aidaKF.histogram2D(eopFolder+"EoP_vs_trackP_"+vol+"_fid").fill(trackp,eop); + aidaKF.histogram2D(eopFolder+"EoP_vs_tanLambda_"+vol+"_fid").fill(tanL,eop); + + + aidaKF.histogram2D(eopFolder+"EoP_vs_trackP_"+charge+"_"+vol+"_fid").fill(trackp,eop); + aidaKF.histogram2D(eopFolder+"EoP_vs_tanLambda_"+charge+"_"+vol+"_fid").fill(tanL,eop); + aidaKF.histogram2D(eopFolder+"EoP_vs_phi_"+charge+"_"+vol+"_fid").fill(phi,eop); + + aidaKF.histogram2D(eopFolder+"EoP_vs_tanLambda_fid").fill(tanL,eop); + aidaKF.histogram2D(eopFolder+"EoP_vs_phi_fid").fill(phi,eop); + aidaKF.histogram3D(eopFolder+"EoP_vs_tanLambda_phi_fid").fill(tanL, + phi, + eop); + + + 
+ // Cluster positions + + double clusterX = cluster.getPosition()[0]; + double clusterY = cluster.getPosition()[1]; + TrackState ts_ecal = TrackUtils.getTrackStateAtECal(track); + + if(ts_ecal == null){ + return; + } + + double[] ts_ecalPos = ts_ecal.getReferencePoint(); + double trkX = ts_ecalPos[1]; + double trkY = ts_ecalPos[2]; + + aidaKF.histogram1D(eopFolder+"Xcluster_"+vol+"_fid").fill(clusterX); + aidaKF.histogram1D(eopFolder+"Ycluster_"+vol+"_fid").fill(clusterY); + + aidaKF.histogram1D(eopFolder+"trk_clu_resX_"+vol+"_fid").fill(trkX-clusterX); + aidaKF.histogram1D(eopFolder+"trk_clu_resY_"+vol+"_fid").fill(trkY-clusterY); + + aidaKF.histogram2D(eopFolder+"trk_clu_resX_vsX_"+vol+"_fid").fill(trkX,trkX-clusterX); + aidaKF.histogram2D(eopFolder+"trk_clu_resX_vsY_"+vol+"_fid").fill(trkY,trkX-clusterX); + + aidaKF.histogram2D(eopFolder+"trk_clu_resY_vsX_"+vol+"_fid").fill(trkX,trkY-clusterY); + aidaKF.histogram2D(eopFolder+"trk_clu_resY_vsY_"+vol+"_fid").fill(trkY,trkY-clusterY); + + aidaKF.histogram2D(eopFolder+"trk_clu_resY_vstrkP_"+vol+"_fid").fill(trackp,trkY-clusterY); + aidaKF.histogram2D(eopFolder+"trk_clu_resX_vstrkP_"+vol+"_fid").fill(trackp,trkX-clusterX); + + aidaKF.histogram2D(eopFolder+"trkY_vs_tanL_"+vol+"_fid").fill(tanL,trkY); + + + aidaKF.histogram1D(eopFolder+"Xcluster_"+charge+"_"+vol+"_fid").fill(clusterX); + aidaKF.histogram1D(eopFolder+"Ycluster_"+charge+"_"+vol+"_fid").fill(clusterY); + + aidaKF.histogram1D(eopFolder+"trk_clu_resX_"+charge+"_"+vol+"_fid").fill(trkX-clusterX); + aidaKF.histogram1D(eopFolder+"trk_clu_resY_"+charge+"_"+vol+"_fid").fill(trkY-clusterY); + + aidaKF.histogram2D(eopFolder+"trk_clu_resX_vsX_"+charge+"_"+vol+"_fid").fill(trkX,trkX-clusterX); + aidaKF.histogram2D(eopFolder+"trk_clu_resX_vsY_"+charge+"_"+vol+"_fid").fill(trkY,trkX-clusterX); + + aidaKF.histogram2D(eopFolder+"trk_clu_resY_vsX_"+charge+"_"+vol+"_fid").fill(trkX,trkY-clusterY); + 
aidaKF.histogram2D(eopFolder+"trk_clu_resY_vsY_"+charge+"_"+vol+"_fid").fill(trkY,trkY-clusterY); + + aidaKF.histogram2D(eopFolder+"trk_clu_resY_vstrkP_"+charge+"_"+vol+"_fid").fill(trackp,trkY-clusterY); + aidaKF.histogram2D(eopFolder+"trk_clu_resX_vstrkP_"+charge+"_"+vol+"_fid").fill(trackp,trkX-clusterX); + + aidaKF.histogram2D(eopFolder+"trkY_vs_tanL_"+charge+"_"+vol+"_fid").fill(tanL,trkY); + + + // + + // As function of incident angle at ECAL, inclusive and in bin of momentum. + + + + + + } + + + } + + + + /* + private void doKFkinks(Track trk, GenericObject kink, Map sensorNums) { + + if (kink == null) { + System.out.println("WARNING::Kink object is null"); + return; + } + + + String vol = "_top"; + int spacing = 0; + if (trk.getTrackStates().get(0).getTanLambda() < 0) { + vol = "_bottom"; + spacing = sensors.size() / 2 + mod; + } + + for (HpsSiSensor sensor : sensorNums.keySet()) { + int index = sensorNums.get(sensor); + double phi = kink.getDoubleVal(index); + float lambda = kink.getFloatVal(index); + + //(2019) For top 0-20, for bottom 25-45 + aidaKF.histogram2D(kinkFolder+"lambda_kink_mod").fill(sensor.getMillepedeId()+spacing,lambda); + aidaKF.profile1D(kinkFolder+"lambda_kink_mod_p").fill(sensor.getMillepedeId()+spacing,lambda); + aidaKF.histogram2D(kinkFolder+"phi_kink_mod").fill(sensor.getMillepedeId()+spacing,phi); + aidaKF.profile1D(kinkFolder+"phi_kink_mod_p").fill(sensor.getMillepedeId()+spacing,phi); + aidaKF.histogram1D(kinkFolder+"lambda_kink_" + sensor.getName()).fill(lambda); + aidaKF.histogram1D(kinkFolder+"phi_kink_" + sensor.getName()).fill(phi); + } + + } + + private void doMTresiduals(Track trk, Map sensorHits) { + TrackState trackState = trk.getTrackStates().get(0); + for (HpsSiSensor sensor : sensorHits.keySet()) { + Hep3Vector extrapPos = TrackStateUtils.getLocationAtSensor(trackState, sensor, bfield); + Hep3Vector hitPos = new BasicHep3Vector(sensorHits.get(sensor).getPosition()); + if (hitPos == null || extrapPos == null) + return; 
+ Hep3Vector diff = VecOp.sub(extrapPos, hitPos); + if (debug) + System.out.printf("MextrapPos %s MhitPos %s \n Mdiff %s ", extrapPos.toString(), hitPos.toString(), diff.toString()); + + ITransform3D trans = sensor.getGeometry().getGlobalToLocal(); + trans.rotate(diff); + + aidaKF.histogram1D(resFolder+"residual_before_KF_" + sensor.getName()).fill(diff.x()); + if (debug) + System.out.printf("MdiffSensor %s \n", diff.toString()); + + } + } + */ + private void FillKFTrackPlot(String str, String isTop, String charge, double val) { + aidaKF.histogram1D(str+isTop).fill(val); + aidaKF.histogram1D(str+isTop+charge).fill(val); + } + + private void FillKFTrackPlot(String str, String isTop, String charge, double valX, double valY) { + aidaKF.histogram2D(str+isTop).fill(valX,valY); + aidaKF.histogram2D(str+isTop+charge).fill(valX,valY); + } + + private void FillKFTrackPlot(String str, String isTop, String charge, double valX, double valY, double valZ) { + aidaKF.histogram3D(str+isTop).fill(valX,valY,valZ); + aidaKF.histogram3D(str+isTop+charge).fill(valX,valY,valZ); + } + + + + private void doBasicKFtrack(Track trk, Map sensorHits) { + + TrackState trackState = trk.getTrackStates().get(0); + + String isTop = "_bottom"; + //if (trk.getTrackerHits().get(0).getPosition()[2] > 0) { + // isTop = "_top"; + //} + + //if (trk.getType()==1 && trk.getTrackerHits().size() < 10) { + // return; + //} + + //List missingHits; + //missingHits = findMissingLayer(trk); + + if (trackState.getTanLambda() > 0) { + isTop = "_top"; + } + + //There is a sign flip in the charge + String charge = "_pos"; + if (trk.getCharge()>0) + charge = "_neg"; + + + //Hep3Vector mom = new BasicHep3Vector(trackState.getMomentum()); + //System.out.println("Track momentum " + mom.toString()); + double trackp = new BasicHep3Vector(trackState.getMomentum()).magnitude(); + + + FillKFTrackPlot(trkpFolder+"d0",isTop,charge,trackState.getD0()); + FillKFTrackPlot(trkpFolder+"z0",isTop,charge,trackState.getZ0()); + 
FillKFTrackPlot(trkpFolder+"phi",isTop,charge,trackState.getPhi()); + FillKFTrackPlot(trkpFolder+"tanLambda",isTop,charge,trackState.getTanLambda()); + FillKFTrackPlot(trkpFolder+"p",isTop,charge,trackp); + if (trk.getTrackerHits().size()==7) + FillKFTrackPlot(trkpFolder+"p7h",isTop,charge,trackp); + if (trk.getTrackerHits().size()==6) + FillKFTrackPlot(trkpFolder+"p6h",isTop,charge,trackp); + if (trk.getTrackerHits().size()==5) + FillKFTrackPlot(trkpFolder+"p5h",isTop,charge,trackp); + + if (TrackUtils.isHoleTrack(trk)) + FillKFTrackPlot(trkpFolder+"p_hole",isTop,charge,trackp); + else + FillKFTrackPlot(trkpFolder+"p_slot",isTop,charge,trackp); + Double trackTime = _trkTimeSigma.getFirstElement(); + Double trackTimeSD = _trkTimeSigma.getSecondElement(); + + //fill track time and standard dev + FillKFTrackPlot(trkpFolder+"trkTime",isTop,charge,trackTime); + FillKFTrackPlot(trkpFolder+"trkTimeSD",isTop,charge,trackTimeSD); + + //Momentum maps + FillKFTrackPlot(trkpFolder+"p_vs_phi",isTop,charge,trackState.getPhi(),trackp); + FillKFTrackPlot(trkpFolder+"p_vs_tanLambda",isTop,charge,trackState.getTanLambda(),trackp); + FillKFTrackPlot(trkpFolder+"p_vs_phi_tanLambda",isTop,charge,trackState.getPhi(),trackState.getTanLambda(),trackp); + + double tanLambda = trackState.getTanLambda(); + double cosLambda = 1. 
/ (Math.sqrt(1+tanLambda*tanLambda)); + + FillKFTrackPlot(trkpFolder+"pT_vs_phi",isTop,charge,trackState.getPhi(),trackp*cosLambda); + FillKFTrackPlot(trkpFolder+"pT_vs_tanLambda",isTop,charge,trackState.getTanLambda(),trackp*cosLambda); + + + //if (trk.getTrackerHits().size()==6) + // FillKFTrackPlot(trkpFolder+"p_Missing1Hit",isTop,charge,missingHits.get(0),trackp); + + //if (missingHits.size()==1 && missingHits.get(0)==7) + // FillKFTrackPlot(trkpFolder+"p_MissingLastLayer",isTop,charge,trackp); + + FillKFTrackPlot(trkpFolder+"Chi2",isTop,charge,trk.getChi2()); + FillKFTrackPlot(trkpFolder+"Chi2oNDF",isTop,charge,trk.getChi2() / trk.getNDF()); + FillKFTrackPlot(trkpFolder+"Chi2_vs_p",isTop,charge,trackp,trk.getChi2()); + + int nhits = trk.getTrackerHits().size(); + + aidaKF.histogram1D(trkpFolder+"nHits" + isTop).fill(nhits); + aidaKF.histogram1D(trkpFolder+"nHits" + isTop+charge).fill(nhits); + + Hep3Vector beamspot = CoordinateTransformations.transformVectorToDetector(TrackUtils.extrapolateHelixToXPlane(trackState, 0)); + if (debug) + System.out.printf("beamspot %s transformed \n", beamspot.toString()); + FillKFTrackPlot(trkpFolder+"trk_extr_or_x",isTop,charge,beamspot.x()); + FillKFTrackPlot(trkpFolder+"trk_extr_or_y",isTop,charge,beamspot.y()); + + //Extrapolation to assumed tgt pos - helix + Hep3Vector trkTgt = CoordinateTransformations.transformVectorToDetector(TrackUtils.extrapolateHelixToXPlane(trackState,bsZ)); + FillKFTrackPlot(trkpFolder+"trk_extr_bs_x",isTop,charge,trkTgt.x()); + FillKFTrackPlot(trkpFolder+"trk_extr_bs_y",isTop,charge,trkTgt.y()); + + //Transform z to the beamspot plane + //Get the PathToPlane + + BaseTrackState ts_bs = TrackUtils.getTrackExtrapAtVtxSurfRK(trackState,bFieldMap,0.,bsZ); + + + //Get the track parameters wrt the beamline using helix + double [] beamLine = new double [] {bsZ,0}; + double [] helixParametersAtBS = TrackUtils.getParametersAtNewRefPoint(beamLine, trackState); + + + 
FillKFTrackPlot(trkpFolder+"trk_extr_bs_x_rk",isTop,charge,ts_bs.getReferencePoint()[1]); + FillKFTrackPlot(trkpFolder+"trk_extr_bs_y_rk",isTop,charge,ts_bs.getReferencePoint()[2]); + + //Ill defined - should be defined wrt bsX and bsY + FillKFTrackPlot(trkpFolder+"d0_vs_bs_rk",isTop,charge,ts_bs.getD0()); + FillKFTrackPlot(trkpFolder+"d0_vs_bs_extrap",isTop,charge,helixParametersAtBS[BaseTrack.D0]); + + double s = HelixUtils.PathToXPlane(TrackUtils.getHTF(trackState),bsZ,0.,0).get(0); + FillKFTrackPlot(trkpFolder+"z0_vs_bs",isTop,charge,trackState.getZ0() + s*trackState.getTanLambda()); + FillKFTrackPlot(trkpFolder+"z0_vs_bs_rk",isTop,charge,ts_bs.getZ0()); + FillKFTrackPlot(trkpFolder+"z0_vs_bs_extrap",isTop,charge,helixParametersAtBS[BaseTrack.Z0]); + + + FillKFTrackPlot(trkpFolder+"phi_vs_bs_extrap",isTop,charge,helixParametersAtBS[BaseTrack.PHI]); + + //TH2D - Filling + FillKFTrackPlot(trkpFolder+"d0_vs_phi",isTop,charge,trackState.getPhi(),trackState.getD0()); + FillKFTrackPlot(trkpFolder+"d0_vs_tanLambda",isTop,charge,trackState.getTanLambda(),trackState.getD0()); + FillKFTrackPlot(trkpFolder+"d0_vs_p",isTop,charge,trackp,trackState.getD0()); + + //Ill defined - should be defined wrt bsX and bsY + FillKFTrackPlot(trkpFolder+"d0bs_vs_p",isTop,charge,trackp,helixParametersAtBS[BaseTrack.D0]); + + FillKFTrackPlot(trkpFolder+"z0_vs_p",isTop,charge,trackp,trackState.getZ0()); + FillKFTrackPlot(trkpFolder+"z0bs_vs_p",isTop,charge,trackp,ts_bs.getZ0()); + + //Interesting plot to get a sense where z-vtx is. 
+ //If z0 is referenced to the right BS z location, the slope of vs tanLambda is 0 + FillKFTrackPlot(trkpFolder+"z0_vs_tanLambda",isTop,charge,trackState.getTanLambda(),trackState.getZ0()); + FillKFTrackPlot(trkpFolder+"z0bs_vs_tanLambda",isTop,charge,trackState.getTanLambda(),ts_bs.getZ0()); + + + if(b_doRawHitPlots){ + + // Map the fitted hits to their corresponding raw hits + this.mapFittedRawHits(_fittedHits); + + for(TrackerHit tkh: trk.getTrackerHits()){ + List rawhits = tkh.getRawHits(); + for(RawTrackerHit rth: rawhits){ + //need the rth->fited + HpsSiSensor sensor = (HpsSiSensor) rth.getDetectorElement(); + double t0 = FittedRawTrackerHit.getT0(getFittedHit(rth)); + double amplitude = FittedRawTrackerHit.getAmp(getFittedHit(rth)); + double chi2Prob = ShapeFitParameters.getChiProb(FittedRawTrackerHit.getShapeFitParameters(getFittedHit(rth))); + aidaKF.histogram1D(hitFolder+"raw_hit_t0_"+sensor.getName()).fill(t0); + aidaKF.histogram1D(hitFolder+"raw_hit_amplitude_"+sensor.getName()).fill(amplitude); + aidaKF.histogram1D(hitFolder+"raw_hit_chisq_"+sensor.getName()).fill(chi2Prob); + + } + } + } + if (b_doDetailPlots) { + int ibins = 15; + double start= -12; + double end = -5; + double step = (end-start) / (double)ibins; + + for (int ibin = 0; ibin sensorHits, EventHeader event) { + + Map sensorMPIDs = new HashMap(); + + for (HpsSiSensor sensor : sensorHits.keySet()) { + //Also fill here the sensorMPIDs map + sensorMPIDs.put(sensor.getMillepedeId(),sensor); + ITransform3D trans = sensor.getGeometry().getGlobalToLocal(); + + // position of hit (track crossing the sensor before kf extrapolation) + // the hit information available on each sensor is meaningful only along the measurement direction, + // Hep3Vector hitPos = new BasicHep3Vector(sensorHits.get(sensor).getPosition()); + // instead: extract the information of the hit of the track at the sensor position before kf + TrackState trackState = trk.getTrackStates().get(0); + Hep3Vector hitTrackPos = 
TrackStateUtils.getLocationAtSensor(trackState, sensor, bfield); + + if (hitTrackPos == null) { + if (debug) { + System.out.printf(this.getClass().getName()+"::doKFresiduals:: hitTrackPos is null to sensor %s\n", sensor.toString()); + } + continue; + } + + Hep3Vector hitTrackPosSensor = new BasicHep3Vector(hitTrackPos.v()); + trans.transform(hitTrackPosSensor); + // after the transformation x and y in the sensor frame are reversed + // This plot is ill defined. + + aidaKF.histogram2D(hitFolder+"hit_u_vs_v_sensor_frame_" + sensor.getName()).fill(hitTrackPosSensor.y(), hitTrackPosSensor.x()); + //aidaKF.histogram2D("hit_u_vs_v_sensor_frame_" + sensor.getName()).fill(hitPos.y(), hitPos.x()); + //aidaKF.histogram2D("hit y vs x lab-frame " + sensor.getName()).fill(hitPos.y(), hitPos.x()); + + + // position predicted on track after KF + Hep3Vector extrapPos = null; + Hep3Vector extrapPosSensor = null; + extrapPos = TrackUtils.extrapolateTrackPositionToSensor(trk, sensor, sensors, bfield); + if (extrapPos == null) + return; + extrapPosSensor = new BasicHep3Vector(extrapPos.v()); + trans.transform(extrapPosSensor); + //aidaKF.histogram2D("residual after KF vs u predicted " + sensor.getName()).fill(extrapPosSensor.x(), res); + aidaKF.histogram2D(hitFolder+"predicted_u_vs_v_sensor_frame_" + sensor.getName()).fill(extrapPosSensor.y(), extrapPosSensor.x()); + // select track charge + if(trk.getCharge()>0) { + aidaKF.histogram2D(hitFolder+"predicted_u_vs_v_pos_sensor_frame_" + sensor.getName()).fill(extrapPosSensor.y(), extrapPosSensor.x()); + }else if(trk.getCharge()<0) { + aidaKF.histogram2D(hitFolder+"predicted_u_vs_v_neg_sensor_frame_" + sensor.getName()).fill(extrapPosSensor.y(), extrapPosSensor.x()); + } + + // post-KF residual + Hep3Vector hitPos = new BasicHep3Vector(sensorHits.get(sensor).getPosition()); + Hep3Vector hitPosSensor = new BasicHep3Vector(hitPos.v()); + trans.transform(hitPosSensor); + Hep3Vector resSensor = VecOp.sub(hitPosSensor, extrapPosSensor); + 
aidaKF.histogram2D(resFolder+"residual_after_KF_vs_v_predicted_" + sensor.getName()).fill(extrapPosSensor.y(), resSensor.x()); + aidaKF.histogram2D(resFolder+"residual_after_KF_vs_u_hit_" + sensor.getName()).fill(hitPosSensor.x(), resSensor.x()); + aidaKF.histogram1D(resFolder+"residual_after_KF_" + sensor.getName()).fill(resSensor.x()); + + + + if (debug) { + System.out.printf("hitPos %s hitPosSensor %s \n", hitPos.toString(), hitPosSensor.toString()); + System.out.printf("resSensor %s \n", resSensor.toString()); + System.out.printf("extrapPos %s extrapPosSensor %s \n", extrapPos.toString(), extrapPosSensor.toString()); + ITransform3D electrodes_to_global = sensor.getReadoutElectrodes(ChargeCarrier.HOLE).getLocalToGlobal(); + Hep3Vector measuredCoordinate = sensor.getReadoutElectrodes(ChargeCarrier.HOLE).getMeasuredCoordinate(0); + Hep3Vector unmeasuredCoordinate = sensor.getReadoutElectrodes(ChargeCarrier.HOLE).getUnmeasuredCoordinate(0); + System.out.printf("unMeasCoordOrig %s MeasCoordOrig %s \n", unmeasuredCoordinate.toString(), measuredCoordinate.toString()); + measuredCoordinate = VecOp.mult(VecOp.mult(CoordinateTransformations.getMatrix(), electrodes_to_global.getRotation().getRotationMatrix()), measuredCoordinate); + unmeasuredCoordinate = VecOp.mult(VecOp.mult(CoordinateTransformations.getMatrix(), electrodes_to_global.getRotation().getRotationMatrix()), unmeasuredCoordinate); + Hep3Vector testX = trans.inverse().rotated(new BasicHep3Vector(1, 0, 0)); + Hep3Vector testY = trans.inverse().rotated(new BasicHep3Vector(0, 1, 0)); + Hep3Vector testZ = trans.inverse().rotated(new BasicHep3Vector(0, 0, 1)); + System.out.printf("unMeasCoord %s MeasCoord %s \n transX %s transY %s transZ %s \n", unmeasuredCoordinate.toString(), measuredCoordinate.toString(), testX.toString(), testY.toString(), testZ.toString()); + } + }//loop on sensor hits + + Double trackTime = _trkTimeSigma.getFirstElement(); + Double trackTimeSD = _trkTimeSigma.getSecondElement(); + /* + 
trackTime /= (float)sensorHits.size(); + + for (HpsSiSensor sensor : sensorHits.keySet()) { + trackTimeSD += Math.pow(trackTime - sensorHits.get(sensor).getTime(),2); + } + + trackTimeSD = Math.sqrt(trackTimeSD / ((float) sensorHits.size() - 1.)); + */ + + + RelationalTable trackResidualsTable = null; + if (event.hasCollection(LCRelation.class, trackResidualsRelColName)) { + trackResidualsTable = new BaseRelationalTable(RelationalTable.Mode.ONE_TO_ONE, RelationalTable.Weighting.UNWEIGHTED); + List trackresRelation = event.get(LCRelation.class, trackResidualsRelColName); + for (LCRelation relation : trackresRelation) { + if (relation != null && relation.getFrom() != null && relation.getTo() != null) { + trackResidualsTable.add(relation.getFrom(), relation.getTo()); + } + } + if (debug) + System.out.println("Loaded track Residuals Table"); + } else { + if (debug) { + System.out.println("null TrackResidualsKF Data Relations."); + } + //Failed finding TrackResidualsKF + return; + } + + GenericObject trackRes = (GenericObject) trackResidualsTable.from(trk); + if (trackRes == null) { + if (debug) + System.out.println("null TrackResidualsKF Data."); + return; + } + + int nres = (trackRes.getNInt()-1); + //int nres = trk.getTrackerHits().size(); + String vol = "_top"; + if (trk.getTrackStates().get(0).getTanLambda() < 0) + vol = "_bottom"; + // get the unbias + for (int i_hit =0; i_hit < nres ; i_hit+=1) { + if (trackRes.getIntVal(i_hit)!=-999) { + //Measured hit + HpsSiSensor hps_sensor = sensorMPIDs.get(trackRes.getIntVal(i_hit)); + Hep3Vector hitPosG = new BasicHep3Vector(sensorHits.get(hps_sensor).getPosition()); + Hep3Vector hitPosSensorG = new BasicHep3Vector(hitPosG.v()); + ITransform3D g2l = hps_sensor.getGeometry().getGlobalToLocal(); + g2l.transform(hitPosSensorG); + String sensorName = (sensorMPIDs.get(trackRes.getIntVal(i_hit))).getName(); + + //Predicted hit + Hep3Vector extrapPos = null; + Hep3Vector extrapPosSensor = null; + extrapPos = 
TrackUtils.extrapolateTrackPositionToSensor(trk, hps_sensor, sensors, bfield); + if (extrapPos == null) + continue; + extrapPosSensor = new BasicHep3Vector(extrapPos.v()); + g2l.transform(extrapPosSensor); + + if (debug) { + System.out.printf("NHits %d MPID sensor:%d %s %d\n", nres,trackRes.getIntVal(i_hit), sensorName,i_hit); + System.out.printf("Track uresiduals: %s %.5f %.5f\n",sensorName, trackRes.getDoubleVal(i_hit),trackRes.getFloatVal(i_hit)); + } + + //General residuals Per volume + aidaKF.histogram1D(resFolder+"uresidual_KF"+vol).fill(trackRes.getDoubleVal(i_hit)); + + if (trackRes.getIntVal(i_hit) < 9) + //L1L4 + aidaKF.histogram1D(resFolder+"uresidual_KF"+vol+"_L1L4").fill(trackRes.getDoubleVal(i_hit)); + else + //L5L7 + aidaKF.histogram1D(resFolder+"uresidual_KF"+vol+"_L5L7").fill(trackRes.getDoubleVal(i_hit)); + + + //Top go from 0 to 20, bottom go from 25 to 45 + int spacing = 0; + if (vol == "_bottom") + spacing = sensors.size()/2 + mod; + + aidaKF.histogram2D(resFolder+"uresidual_KF_mod").fill(trackRes.getIntVal(i_hit)+spacing,trackRes.getDoubleVal(i_hit)); + aidaKF.profile1D(resFolder+"uresidual_KF_mod_p").fill(trackRes.getIntVal(i_hit)+spacing,trackRes.getDoubleVal(i_hit)); + aidaKF.histogram1D(resFolder+"uresidual_KF_" + sensorName).fill(trackRes.getDoubleVal(i_hit)); + aidaKF.histogram2D(resFolder+"uresidual_KF_vs_u_hit_" + sensorName).fill(hitPosSensorG.x(),trackRes.getDoubleVal(i_hit)); + aidaKF.histogram2D(resFolder+"uresidual_KF_vs_v_pred_" + sensorName).fill(extrapPosSensor.y(),trackRes.getDoubleVal(i_hit)); + aidaKF.histogram1D(epullFolder+"ureserror_KF_" + sensorName).fill(trackRes.getFloatVal(i_hit)); + aidaKF.histogram1D(epullFolder+"ures_pull_KF_" + sensorName).fill(trackRes.getDoubleVal(i_hit) / trackRes.getFloatVal(i_hit)); + + //Get the hit time + double hitTime = sensorHits.get(hps_sensor).getTime(); + + //Get the track time (it's the average of hits-on-track time) + + double dT_hit_track = hitTime - trackTime; + double 
dT_hit_sigma = (hitTime - trackTime) / trackTimeSD; + + aidaKF.histogram2D(resFolder+"uresidual_KF_vs_dT_hit_"+sensorName).fill(dT_hit_track,trackRes.getDoubleVal(i_hit)); + aidaKF.histogram2D(resFolder+"uresidual_KF_vs_dTs_hit_"+sensorName).fill(dT_hit_sigma,trackRes.getDoubleVal(i_hit)); + + + + + } + else { + if (debug){ + System.out.printf("Track refit failed? No biased residual for %d\n", i_hit); + } + } + } + }//doKFresiduals + + private List findMissingLayer(Track trk) { + + List layers = new ArrayList(); + layers.add(1); + layers.add(2); + layers.add(3); + layers.add(4); + layers.add(5); + layers.add(6); + layers.add(7); + + List LayersOnTrack = new ArrayList(); + List missingHits = new ArrayList(); + + for (TrackerHit hit : trk.getTrackerHits()) { + int stripLayer = ((HpsSiSensor) ((RawTrackerHit) hit.getRawHits().get(0)).getDetectorElement()).getLayerNumber(); + // int hpslayer = (stripLayer + 1 ) / 2; + LayersOnTrack.add(stripLayer); + } + for (Integer layer : layers) { + if (!LayersOnTrack.contains(layer)) + missingHits.add(layer); + } + return missingHits; + } + + private void setupEoPPlots() { + + List volumes = new ArrayList(); + volumes.add("_top"); + volumes.add("_bottom"); + + List charges = new ArrayList(); + charges.add(""); + charges.add("_ele"); + charges.add("_pos"); + + for (String vol : volumes) { + + aidaKF.histogram1D(eopFolder+"Ecluster"+vol,200,0,6); + aidaKF.histogram1D(eopFolder+"EoP"+vol,200,0,2); + + double lmin = 0.; + double lmax = 0.08; + if (vol == "_bot") { + lmin = -0.08; + lmax = 0.; + } + + for (String charge : charges) { + aidaKF.histogram2D(eopFolder+"EoP_vs_trackP"+charge+vol,200,0,6,200,0,2); + aidaKF.histogram2D(eopFolder+"EoP_vs_tanLambda"+charge+vol,200,lmin,lmax,200,0,2); + aidaKF.histogram2D(eopFolder+"EoP_vs_phi"+charge+vol,200,-0.2,0.2,200,0,2); + } + + aidaKF.histogram1D(eopFolder+"Ecluster"+vol+"_fid",200,0,5); + aidaKF.histogram1D(eopFolder+"EoP"+vol+"_fid",200,0,2); + 
aidaKF.histogram2D(eopFolder+"EoP_vs_trackP"+vol+"_fid",200,0,6,200,0,2); + + + double cxrange = 20; + double cyrange = 20; + double ecalX = 400; + + aidaKF.histogram1D(eopFolder+"Xcluster"+vol+"_fid",200,-ecalX,ecalX); + aidaKF.histogram1D(eopFolder+"Ycluster"+vol+"_fid",200,-ecalX,ecalX); + aidaKF.histogram1D(eopFolder+"trk_clu_resX"+vol+"_fid",200,-cxrange,cxrange); + aidaKF.histogram1D(eopFolder+"trk_clu_resY"+vol+"_fid",200,-cyrange,cyrange); + + aidaKF.histogram2D(eopFolder+"trk_clu_resX_vsX"+vol+"_fid",200,-ecalX,ecalX,200,-cxrange,cxrange); + aidaKF.histogram2D(eopFolder+"trk_clu_resX_vsY"+vol+"_fid",200,-ecalX,ecalX,200,-cxrange,cxrange); + + aidaKF.histogram2D(eopFolder+"trk_clu_resY_vsX"+vol+"_fid",200,-ecalX,ecalX,200,-cyrange,cyrange); + aidaKF.histogram2D(eopFolder+"trk_clu_resY_vsY"+vol+"_fid",200,-ecalX,ecalX,200,-cyrange,cyrange); + + aidaKF.histogram2D(eopFolder+"trk_clu_resY_vstrkP"+vol+"_fid",100,0.,5,200,-cyrange,cyrange); + aidaKF.histogram2D(eopFolder+"trk_clu_resX_vstrkP"+vol+"_fid",100,0.,5,200,-cyrange,cyrange); + + aidaKF.histogram2D(eopFolder+"trkY_vs_tanL"+vol+"_fid",200,-0.2,0.2,200,-100,100); + + + for (String charge : charges) { + + //put the trk-cluster time in trkpFolder + aidaKF.histogram1D(trkpFolder+"trk-cluTime"+charge+vol,100,-20,20); + + aidaKF.histogram2D(eopFolder+"EoP_vs_trackP"+charge+vol+"_fid",200,0,6,200,0,2); + aidaKF.histogram2D(eopFolder+"EoP_vs_tanLambda"+charge+vol+"_fid",200,0.01,0.08,200,0,2); + aidaKF.histogram2D(eopFolder+"EoP_vs_phi"+charge+vol+"_fid",200,-0.2,0.2,200,0,2); + + + + aidaKF.histogram1D(eopFolder+"Xcluster"+charge+vol+"_fid",200,-ecalX,ecalX); + aidaKF.histogram1D(eopFolder+"Ycluster"+charge+vol+"_fid",200,-ecalX,ecalX); + aidaKF.histogram1D(eopFolder+"trk_clu_resX"+charge+vol+"_fid",200,-cxrange,cxrange); + aidaKF.histogram1D(eopFolder+"trk_clu_resY"+charge+vol+"_fid",200,-cyrange,cyrange); + + 
aidaKF.histogram2D(eopFolder+"trk_clu_resX_vsX"+charge+vol+"_fid",200,-ecalX,ecalX,200,-cxrange,cxrange); + aidaKF.histogram2D(eopFolder+"trk_clu_resX_vsY"+charge+vol+"_fid",200,-ecalX,ecalX,200,-cxrange,cxrange); + + aidaKF.histogram2D(eopFolder+"trk_clu_resY_vsX"+charge+vol+"_fid",200,-ecalX,ecalX,200,-cyrange,cyrange); + aidaKF.histogram2D(eopFolder+"trk_clu_resY_vsY"+charge+vol+"_fid",200,-ecalX,ecalX,200,-cyrange,cyrange); + + aidaKF.histogram2D(eopFolder+"trk_clu_resY_vstrkP"+charge+vol+"_fid",100,0.,5,200,-cyrange,cyrange); + aidaKF.histogram2D(eopFolder+"trk_clu_resX_vstrkP"+charge+vol+"_fid",100,0.,5,200,-cyrange,cyrange); + + aidaKF.histogram2D(eopFolder+"trkY_vs_tanL"+charge+vol+"_fid",200,-0.2,0.2,200,-100,100); + + + + + } + } + + aidaKF.histogram2D(eopFolder+"EoP_vs_tanLambda",200,-0.1,0.1,200,0,2); + aidaKF.histogram2D(eopFolder+"EoP_vs_phi",200,-0.2,0.2,200,0,2); + aidaKF.histogram3D(eopFolder+"EoP_vs_tanLambda_phi",200,-0.08,0.08,200,-0.2,0.2,200,0,2); + + aidaKF.histogram2D(eopFolder+"EoP_vs_tanLambda_fid",200,-0.1,0.1,200,0,2); + aidaKF.histogram2D(eopFolder+"EoP_vs_phi_fid",200,-0.2,0.2,200,0,2); + aidaKF.histogram3D(eopFolder+"EoP_vs_tanLambda_phi_fid",200,-0.08,0.08,200,-0.2,0.2,200,0,2); + + } + + + private void setupPlots() { + + + double xmax = 0.25; + double kxmax = 0.001; + + int nbins = 250; + List volumes = new ArrayList(); + volumes.add("_top"); + volumes.add("_bottom"); + int mod_2dplot_bins = sensors.size()+mod*2; + + for (String vol : volumes) { + aidaKF.histogram1D(resFolder+"bresidual_KF"+vol,nbins, -xmax, xmax); + aidaKF.histogram1D(resFolder+"uresidual_KF"+vol,nbins, -xmax, xmax); + aidaKF.histogram1D(resFolder+"bresidual_KF"+vol+"_L1L4",nbins,-xmax,xmax); + aidaKF.histogram1D(resFolder+"uresidual_KF"+vol+"_L1L4",nbins,-xmax,xmax); + aidaKF.histogram1D(resFolder+"bresidual_KF"+vol+"_L5L7",nbins,-xmax,xmax); + aidaKF.histogram1D(resFolder+"uresidual_KF"+vol+"_L5L7",nbins,-xmax,xmax); + + } + + //res/kinks TH2D + //5 empty bins to 
distinguish between top and bottom + + aidaKF.histogram2D(resFolder+"bresidual_KF_mod",mod_2dplot_bins,-0.5,mod_2dplot_bins-0.5, nbins, -xmax,xmax); + aidaKF.profile1D(resFolder+"bresidual_KF_mod_p",mod_2dplot_bins,-0.5,mod_2dplot_bins-0.5); + aidaKF.histogram2D(resFolder+"uresidual_KF_mod",mod_2dplot_bins,-0.5,mod_2dplot_bins-0.5, 400, -0.4,0.4); + aidaKF.profile1D(resFolder+"uresidual_KF_mod_p",mod_2dplot_bins,-0.5,mod_2dplot_bins-0.5); + + + //Hits vs channel + int nch = 400; + aidaKF.histogram2D(resFolder+"Axial_vs_Stereo_channel_moduleL1b",nch,0,nch,nch,0,nch); + aidaKF.histogram2D(resFolder+"Axial_vs_Stereo_channel_moduleL2b",nch,0,nch,nch,0,nch); + aidaKF.histogram2D(resFolder+"Axial_vs_Stereo_channel_moduleL3b",nch,0,nch,nch,0,nch); + aidaKF.histogram2D(resFolder+"Axial_vs_Stereo_channel_moduleL4b",nch,0,nch,nch,0,nch); + aidaKF.histogram2D(resFolder+"Axial_vs_Stereo_channel_moduleL5b",nch,0,nch,nch,0,nch); + aidaKF.histogram2D(resFolder+"Axial_vs_Stereo_channel_moduleL6b",nch,0,nch,nch,0,nch); + aidaKF.histogram2D(resFolder+"Axial_vs_Stereo_channel_moduleL7b",nch,0,nch,nch,0,nch); + + aidaKF.histogram2D(resFolder+"Axial_vs_Stereo_channel_moduleL1t",nch,0,nch,nch,0,nch); + aidaKF.histogram2D(resFolder+"Axial_vs_Stereo_channel_moduleL2t",nch,0,nch,nch,0,nch); + aidaKF.histogram2D(resFolder+"Axial_vs_Stereo_channel_moduleL3t",nch,0,nch,nch,0,nch); + aidaKF.histogram2D(resFolder+"Axial_vs_Stereo_channel_moduleL4t",nch,0,nch,nch,0,nch); + aidaKF.histogram2D(resFolder+"Axial_vs_Stereo_channel_moduleL5t",nch,0,nch,nch,0,nch); + aidaKF.histogram2D(resFolder+"Axial_vs_Stereo_channel_moduleL6t",nch,0,nch,nch,0,nch); + aidaKF.histogram2D(resFolder+"Axial_vs_Stereo_channel_moduleL7t",nch,0,nch,nch,0,nch); + + + + for (SiSensor sensor : sensors) { + + HpsSiSensor sens = (HpsSiSensor) sensor.getGeometry().getDetectorElement(); + xmax = 0.5; + nbins = 250; + int l = (sens.getLayerNumber() + 1) / 2; + if (l > 1) xmax = 0.05 + (l - 1) * 0.08; + 
aidaKF.histogram1D(resFolder+"residual_before_KF_" + sensor.getName(), nbins, -xmax, xmax); + + xmax = 0.250; + + if (l >= 6) + xmax = 0.250; + aidaKF.histogram1D(resFolder+"residual_after_KF_" + sensor.getName(), nbins, -xmax, xmax); + aidaKF.histogram1D(resFolder+"bresidual_KF_" + sensor.getName(), nbins, -xmax, xmax); + aidaKF.histogram1D(resFolder+"uresidual_KF_" + sensor.getName(), nbins, -xmax, xmax); + aidaKF.histogram2D(resFolder+"uresidual_KF_vs_u_hit_" + sensor.getName(),100,-20.0,20.0,100,-0.1,0.1); + aidaKF.histogram2D(resFolder+"uresidual_KF_vs_v_pred_" + sensor.getName(),300,-60.0,60.0,100,-0.1,0.1); + aidaKF.histogram2D(resFolder+"uresidual_KF_vs_dT_hit_" + sensor.getName(),100,-10.0,10.0,100,-0.1,0.1); + aidaKF.histogram2D(resFolder+"uresidual_KF_vs_dTs_hit_" + sensor.getName(),100,-5.0,5.0,100,-0.1,0.1); + + + aidaKF.histogram1D(epullFolder+"breserror_KF_" + sensor.getName(), nbins, 0.0, 0.1); + aidaKF.histogram1D(epullFolder+"ureserror_KF_" + sensor.getName(), nbins, 0.0, 0.2); + aidaKF.histogram1D(epullFolder+"bres_pull_KF_" + sensor.getName(), nbins, -5, 5); + aidaKF.histogram1D(epullFolder+"ures_pull_KF_" + sensor.getName(), nbins, -5, 5); + + aidaKF.histogram2D(resFolder+"residual_after_KF_vs_u_hit_" + sensor.getName(), 100, -20.0, 20.0, 100, -0.04, 0.04); + aidaKF.histogram2D(resFolder+"residual_after_KF_vs_v_predicted_" + sensor.getName(), 100, -55.0, 55.0, 100, -0.04, 0.04); + aidaKF.histogram2D(hitFolder+"hit_u_vs_v_sensor_frame_" + sensor.getName(), 300, -60.0, 60.0, 300, -25, 25); + aidaKF.histogram2D(hitFolder+"predicted_u_vs_v_sensor_frame_" + sensor.getName(), 100, -60, 60, 100, -25, 25); + aidaKF.histogram2D(hitFolder+"predicted_u_vs_v_pos_sensor_frame_" + sensor.getName(), 100, -60, 60, 100, -25, 25); + aidaKF.histogram2D(hitFolder+"predicted_u_vs_v_neg_sensor_frame_" + sensor.getName(), 100, -60, 60, 100, -25, 25); + + aidaKF.histogram1D(hitFolder+"raw_hit_t0_"+sensor.getName(),200, -100, 100.0); + 
aidaKF.histogram1D(hitFolder+"raw_hit_amplitude_"+sensor.getName(),200, 0.0, 4000.0); + aidaKF.histogram1D(hitFolder+"raw_hit_chisq_"+sensor.getName(),200, 0.0, 2.0); + + + + xmax = 0.0006; + if(l==1){ + xmax = 0.0002; + }else if(l==2){ + xmax = 0.0005; + }else if(l==3 || l==4){ + xmax = 0.0006; + }else if(l >= 5) { + if (sens.isBottomLayer() && sens.isAxial()) + xmax = 0.001; + if (sens.isTopLayer() && !sens.isAxial()) + xmax = 0.001; + } + // aidaKF.histogram1D(kinkFolder+"lambda_kink_" + sensor.getName(), 250, -xmax, xmax); + //aidaKF.histogram1D(kinkFolder+"phi_kink_" + sensor.getName(), 250, -xmax, xmax); + } + /* + aidaKF.histogram2D(kinkFolder+"lambda_kink_mod",mod_2dplot_bins,-0.5,mod_2dplot_bins-0.5,nbins,-0.001,0.001); + aidaKF.profile1D(kinkFolder+"lambda_kink_mod_p",mod_2dplot_bins,-0.5,mod_2dplot_bins-0.5); + aidaKF.histogram2D(kinkFolder+"phi_kink_mod",mod_2dplot_bins,-0.5,mod_2dplot_bins-0.5 ,nbins,-0.001,0.001); + aidaKF.profile1D(kinkFolder+"phi_kink_mod_p",mod_2dplot_bins,-0.5,mod_2dplot_bins-0.5); + */ + List charges = new ArrayList(); + charges.add(""); + charges.add("_pos"); + charges.add("_neg"); + + int nbins_t = 200; + + //For momentum + int nbins_p = 150; + double pmax = 4.; + + double z0max = 1; + double d0max = 5; + double z0bsmax = 0.2; + + aidaKF.histogram1D(trkpFolder+"nTracks",15,0,15); + for (String vol : volumes) { + for (String charge : charges) { + + + //TH1Ds + aidaKF.histogram1D(trkpFolder+"d0"+vol+charge,nbins_t,-5.0,5.0); + aidaKF.histogram1D(trkpFolder+"z0"+vol+charge,nbins_t,-1.3,1.3); + aidaKF.histogram1D(trkpFolder+"phi"+vol+charge,nbins_t,-0.06,0.06); + aidaKF.histogram1D(trkpFolder+"tanLambda"+vol+charge,nbins_t,-0.2,0.2); + aidaKF.histogram1D(trkpFolder+"trkTime"+vol+charge,nbins_t,-20,20); + aidaKF.histogram1D(trkpFolder+"trkTimeSD"+vol+charge,nbins_t,0,10); + + aidaKF.histogram1D(trkpFolder+"p"+vol+charge,nbins_p,0.,pmax); + aidaKF.histogram1D(trkpFolder+"p7h"+vol+charge,nbins_p,0.,pmax); + 
aidaKF.histogram1D(trkpFolder+"p6h"+vol+charge,nbins_p,0.,pmax); + aidaKF.histogram1D(trkpFolder+"p5h"+vol+charge,nbins_p,0.,pmax); + aidaKF.histogram1D(trkpFolder+"p_MissingLastLayer"+vol+charge,nbins_p,0.,pmax); + aidaKF.histogram1D(trkpFolder+"p_hole"+vol+charge,nbins_p,0.,pmax); + aidaKF.histogram1D(trkpFolder+"p_slot"+vol+charge,nbins_p,0.,pmax); + + aidaKF.histogram1D(trkpFolder+"Chi2"+vol+charge,nbins_t*2,0,200); + aidaKF.histogram1D(trkpFolder+"Chi2oNDF"+vol+charge,nbins_t*2,0,50); + aidaKF.histogram1D(trkpFolder+"nHits"+vol+charge,15,0,15); + aidaKF.histogram1D(trkpFolder+"trk_extr_or_x"+vol+charge,nbins_t,-3,3); + aidaKF.histogram1D(trkpFolder+"trk_extr_or_y"+vol+charge,nbins_t,-3,3); + aidaKF.histogram1D(trkpFolder+"trk_extr_bs_x"+vol+charge, 2*nbins_t, -5, 5); + aidaKF.histogram1D(trkpFolder+"trk_extr_bs_y"+vol+charge, 2*nbins_t, -5, 5); + aidaKF.histogram1D(trkpFolder+"trk_extr_bs_x_rk"+vol+charge, 2*nbins_t, -5, 5); + aidaKF.histogram1D(trkpFolder+"trk_extr_bs_y_rk"+vol+charge, 2*nbins_t, -3, 3); + aidaKF.histogram1D(trkpFolder+"d0_vs_bs_rk"+vol+charge, 2*nbins_t, -5, 5); + aidaKF.histogram1D(trkpFolder+"d0_vs_bs_extrap"+vol+charge, 2*nbins_t, -5, 5); + aidaKF.histogram1D(trkpFolder+"z0_vs_bs_rk"+vol+charge, 2*nbins_t, -z0bsmax, z0bsmax); + aidaKF.histogram1D(trkpFolder+"z0_vs_bs_extrap"+vol+charge, 2*nbins_t, -z0bsmax, z0bsmax); + aidaKF.histogram1D(trkpFolder+"z0_vs_bs"+vol+charge, 2*nbins_t, -z0bsmax, z0bsmax); + aidaKF.histogram1D(trkpFolder+"phi_vs_bs_extrap"+vol+charge,2*nbins_t, -0.06,0.06); + + + //TH2Ds + + aidaKF.histogram2D(trkpFolder+"d0_vs_phi"+vol+charge,nbins_t,-0.3,0.3,nbins_t,-5.0,5.0); + aidaKF.histogram2D(trkpFolder+"Chi2_vs_p"+vol+charge,nbins_p,0.0,pmax,nbins_t*2,0,200); + //aidaKF.histogram2D("d0_vs_phi_bs"+vol+charge,nbins_t,-5.0,5.0,nbins_t,-0.3,0.3); + aidaKF.histogram2D(trkpFolder+"d0_vs_tanLambda"+vol+charge,nbins_t,-0.2,0.2,nbins_t,-5.0,5.0); + aidaKF.histogram2D(trkpFolder+"d0_vs_p"+vol+charge, 
nbins_p,0.0,pmax,nbins_t,-5.0,5.0); + aidaKF.histogram2D(trkpFolder+"d0bs_vs_p"+vol+charge,nbins_p,0.0,pmax,nbins_t,-5.0,5.0); + aidaKF.histogram2D(trkpFolder+"z0_vs_p"+vol+charge, nbins_p,0.0,pmax,nbins_t,-5.0,5.0); + aidaKF.histogram2D(trkpFolder+"z0bs_vs_p"+vol+charge,nbins_p,0.0,pmax,nbins_t,-z0bsmax,z0bsmax); + aidaKF.histogram2D(trkpFolder+"z0_vs_tanLambda"+vol+charge, nbins_t,-0.1,0.1,nbins_t,-z0max,z0max); + aidaKF.histogram2D(trkpFolder+"z0bs_vs_tanLambda"+vol+charge,nbins_t,-0.1,0.1,nbins_t,-z0bsmax,z0bsmax); + + aidaKF.histogram2D(trkpFolder+"p_Missing1Hit"+vol+charge,8,0,8,nbins_p,0.0,pmax); + aidaKF.histogram2D(trkpFolder+"p_vs_phi"+vol+charge, nbins_t,-0.3,0.3, nbins_p,0.,pmax); + aidaKF.histogram2D(trkpFolder+"p_vs_tanLambda"+vol+charge,nbins_t,-0.2,0.2,nbins_p,0.,pmax); + aidaKF.histogram3D(trkpFolder+"p_vs_phi_tanLambda"+vol+charge, 50,-0.3,0.3,50,-0.2,0.2,100,0.,pmax); + + aidaKF.histogram2D(trkpFolder+"pT_vs_phi"+vol+charge, nbins_t,-0.3,0.3, nbins_p,0.,pmax); + aidaKF.histogram2D(trkpFolder+"pT_vs_tanLambda"+vol+charge,nbins_t,-0.2,0.2,nbins_p,0.,pmax); + + + + if (b_doDetailPlots) { + //TH2Ds - detail + int ibins = 15; + double start= -12; + double end = -5; + double step = (end-start) / (double)ibins; + for (int ibin = 0; ibin getTrackTime(Map sensorHits){ + double trackTime = 0.; + double trackTimeSD = 0.; + for (HpsSiSensor sensor : sensorHits.keySet()) { + trackTime += sensorHits.get(sensor).getTime(); + } + trackTime /= (float)sensorHits.size(); + + for (HpsSiSensor sensor : sensorHits.keySet()) { + trackTimeSD += Math.pow(trackTime - sensorHits.get(sensor).getTime(),2); + } + + trackTimeSD = Math.sqrt(trackTimeSD / ((float) sensorHits.size() - 1.)); + return new Pair(trackTime, trackTimeSD); + } + + public void endOfData() { + if (outputPlots != null) { + try { + aidaKF.saveAs(outputPlots); + + /* + // remove all KF histograms from heap after they have been written on output file + String[] type = aidaKF.tree().listObjectNames("/",true); 
+ for (int i=0; i allHits) { + + // Clear the fitted raw hit map of old values + fittedRawTrackerHitMap.clear(); + + // Loop through all fitted hits and map them to their corresponding raw hits + for (LCRelation fittedHit : allHits) { + fittedRawTrackerHitMap.put(FittedRawTrackerHit.getRawTrackerHit(fittedHit), fittedHit); + } + } + + + private LCRelation getFittedHit(RawTrackerHit rawHit) { + return fittedRawTrackerHitMap.get(rawHit); + } +} From ff656ef6348b7fe35db5a5b22ce70d656ad68a9d Mon Sep 17 00:00:00 2001 From: Matt Graham Date: Thu, 31 Oct 2024 16:01:15 -0700 Subject: [PATCH 2/8] modify ReadoutDataManager to work with no spacing and add the needed parameters to the test steering file --- .../org/hps/readout/ReadoutDataManager.java | 2403 +++++++++-------- .../hps/steering/readout/TestNoSpacing.lcsim | 6 +- 2 files changed, 1223 insertions(+), 1186 deletions(-) diff --git a/record-util/src/main/java/org/hps/readout/ReadoutDataManager.java b/record-util/src/main/java/org/hps/readout/ReadoutDataManager.java index c5b4c8670..858405a1d 100755 --- a/record-util/src/main/java/org/hps/readout/ReadoutDataManager.java +++ b/record-util/src/main/java/org/hps/readout/ReadoutDataManager.java @@ -1,1184 +1,1219 @@ -package org.hps.readout; - -import java.io.File; -import java.io.IOException; -import java.util.ArrayList; -import java.util.BitSet; -import java.util.Collection; -import java.util.HashMap; -import java.util.HashSet; -import java.util.LinkedList; -import java.util.List; -import java.util.Map; -import java.util.Map.Entry; -import java.util.PriorityQueue; -import java.util.Set; -import java.util.logging.Logger; - -import org.hps.conditions.database.DatabaseConditionsManager; -import org.hps.record.evio.EvioEventConstants; -import org.hps.record.triggerbank.TSGenericObject; -import org.hps.readout.util.TimedList; -import org.hps.readout.util.TriggerTime; -import org.hps.readout.util.collection.LCIOCollection; -import 
org.hps.readout.util.collection.LCIOCollectionFactory; -import org.hps.readout.util.collection.ManagedLCIOCollection; -import org.hps.readout.util.collection.ManagedLCIOData; -import org.hps.readout.util.collection.TriggeredLCIOData; -import org.hps.record.triggerbank.BaseTriggerData; -import org.lcsim.event.EventHeader; -import org.lcsim.event.GenericObject; -import org.lcsim.event.MCParticle; -import org.lcsim.event.base.BaseLCSimEvent; -import org.lcsim.geometry.IDDecoder; -import org.lcsim.lcio.LCIOWriter; -import org.lcsim.util.Driver; - -/** - * Class ReadoutDataManager is the central management - * class for the HPS readout chain. It is responsible for tracking - * most LCSim collection data, for syncing readout data production - * drivers and their output, for passing managed data objects to - * drivers as input, for managing triggers, and for writing out data. - *

- * More information on how a readout driver should interface - */ -public class ReadoutDataManager extends Driver { - /** - * Defines the default size of the readout window in units of - * nanoseconds. - */ - private static int readoutWindow = 200; - /** - * Defines the name of the output file for the run. - */ - private static String outputFileName = null; - /** - * Defines where the trigger time should occur within the default - * readout window. For instance, a value of t means - * that a period of time equal to t will be included - * before the trigger time, and a period of time equal to - * readoutWindow - t will be included after it. - */ - private static double triggerTimeDisplacement = 50; - /** - * Defines the length of an event in units of nanoseconds. - */ - private static final double BEAM_BUNCH_SIZE = 2.0; - /** - * Tracks the current simulation time in units of nanoseconds. - */ - private static double currentTime = 0.0; - /** - * Tracks all registered readout drivers. - */ - private static final Set driverSet = new HashSet(); - /** - * Tracks all data collections which are managed by the readout - * manager as well as their properties. - */ - private static final Map> collectionMap = new HashMap>(); - /** - * Tracks the time displacement for trigger drivers. - */ - private static final Map triggerTimeDisplacementMap = new HashMap(); - /** - * Stores trigger requests from trigger drivers until enough time - * has passed to fully buffer the necessary readout data. - */ - private static final PriorityQueue triggerQueue = new PriorityQueue(); - /** - * A writer for writing readout events to an output LCIO file. - */ - private static LCIOWriter outputWriter = null; - /** - * Tracks the total amount of time that must be buffered to allow - * for readout to occur. - */ - private static double bufferTotal = 0.0; - /** - * The total number of triggers seen. 
- */ - private static int triggers = 0; - /** - * The delay between when a trigger occurs, and when readout is - * performed. - */ - private static double triggerDelay = 0.0; - - /** - * Collection parameters for the dummy trigger bank object. - */ - private static LCIOCollection triggerBankParams = null; - - private static final String nl = String.format("%n"); - private static final Logger logger = Logger.getLogger(ReadoutDataManager.class.getSimpleName()); - - @Override - public void startOfData() { - // Instantiate the readout LCIO file. - if(outputFileName == null) { - throw new IllegalArgumentException("Error: Output file name not defined!"); - } - try { outputWriter = new LCIOWriter(new File(outputFileName)); } - catch (IOException e) { - e.printStackTrace(); - throw new RuntimeException(); - } - - // Create a collection for the dummy trigger bank. - LCIOCollectionFactory.setCollectionName("TriggerBank"); - LCIOCollectionFactory.setFlags(0); - triggerBankParams = LCIOCollectionFactory.produceLCIOCollection(GenericObject.class); - - // Get the total amount of time that the readout system must - // wait to make sure that all data has been safely buffered - // and exists to read out. - double longestBufferBefore = 0.0; - double longestBufferAfter = 0.0; - double longestLocalBuffer = 0.0; - double longestTimeDisplacement = 0.0; - double longestDisplacedAfter = 0.0; - double longestTriggerDisplacement = 0.0; - - StringBuffer initializationBuffer = new StringBuffer(); - initializationBuffer.append("Getting longest trigger time displacement..." 
+ nl); - for(Entry entry : triggerTimeDisplacementMap.entrySet()) { - initializationBuffer.append(String.format("\t%-30s :: %.0f%n", entry.getKey().getClass().getSimpleName(), entry.getValue().doubleValue())); - longestTriggerDisplacement = Math.max(longestTriggerDisplacement, entry.getValue().doubleValue()); - } - initializationBuffer.append("Longest is: " + longestTriggerDisplacement + nl + nl); - - initializationBuffer.append("Getting longest driver collection buffers..." + nl); - for(ManagedLCIOData data : collectionMap.values()) { - double before = Double.isNaN(data.getCollectionParameters().getWindowBefore()) ? 0.0 : data.getCollectionParameters().getWindowBefore(); - double after = Double.isNaN(data.getCollectionParameters().getWindowAfter()) ? 0.0 : data.getCollectionParameters().getWindowAfter(); - double displacement = data.getCollectionParameters().getProductionDriver().getTimeDisplacement(); - double local = data.getCollectionParameters().getProductionDriver().getTimeNeededForLocalOutput(); - - initializationBuffer.append("\t" + data.getCollectionParameters().getCollectionName() + nl); - initializationBuffer.append(String.format("\t\t%-20s :: %.0f%n", "Buffer Before", before)); - initializationBuffer.append(String.format("\t\t%-20s :: %.0f%n", "Buffer After", after)); - initializationBuffer.append(String.format("\t\t%-20s :: %.0f%n", "Local Buffer", local)); - initializationBuffer.append(String.format("\t\t%-20s :: %.0f%n", "Displacement", displacement)); - initializationBuffer.append(String.format("\t\t%-20s :: %.0f%n", "Displaced After", (displacement + after))); - - longestBufferBefore = Math.max(longestBufferBefore, before); - longestBufferAfter = Math.max(longestBufferAfter, after); - longestLocalBuffer = Math.max(longestLocalBuffer, local); - longestTimeDisplacement = Math.max(longestTimeDisplacement, displacement); - longestDisplacedAfter = Math.max(longestDisplacedAfter, displacement + after); - } - initializationBuffer.append("Longest (before) 
is: " + longestBufferBefore + nl); - initializationBuffer.append("Longest (after) is: " + longestBufferAfter + nl); - initializationBuffer.append("Longest (local) is: " + longestLocalBuffer + nl); - initializationBuffer.append("Longest (displacement) is: " + longestTimeDisplacement + nl); - initializationBuffer.append("Longest (displacemed after) is: " + longestDisplacedAfter + nl + nl); - - initializationBuffer.append("Readout Window: " + readoutWindow + nl); - initializationBuffer.append("Trigger Offset: " + triggerTimeDisplacement + nl); - initializationBuffer.append("Default Before: " + triggerTimeDisplacement + nl); - initializationBuffer.append("Default After : " + (readoutWindow - triggerTimeDisplacement) + nl + nl); - - triggerDelay = Math.max(longestTriggerDisplacement, longestDisplacedAfter); - triggerDelay = Math.max(triggerDelay, longestLocalBuffer); - double totalNeededDisplacement = triggerDelay + longestBufferBefore + 150; - - initializationBuffer.append("Total Time Needed: " + totalNeededDisplacement + nl); - logger.fine(nl + initializationBuffer.toString()); - - // Determine the total amount of time that must be included - // in the data buffer in order to safely write out all data. - // An extra 150 ns of data is retained as a safety, just in - // case some driver needs to look unusually far back. - bufferTotal = totalNeededDisplacement; - } - - @Override - public void endOfData() { - try { outputWriter.close(); } - catch(IOException e) { - e.printStackTrace(); - throw new RuntimeException(); - } - - System.out.println("Wrote " + triggers + " triggers."); - } - - @Override - public void process(EventHeader event) { - // Check the trigger queue. - if(!triggerQueue.isEmpty()) { - // Check the earliest possible trigger write time. - boolean isWritable = getCurrentTime() >= triggerQueue.peek().getTriggerTime() + bufferTotal; - - // If all collections are available to be written, the - // event should be output. 
- if(isWritable) { - // Store the current trigger data. - TriggerTime trigger = triggerQueue.poll(); - - // 2016 MC only process one trigger, and no TS bank is stored - // 2019 MC can process multi-trigger, and TS bank is stored - List triggerList = new ArrayList(); - if(!trigger.getTriggerType().equals("noSet")) { - triggerList.add(trigger); - - // Iterate triggers in queue, remove next trigger if time of next trigger is the - // same as previous, until time of next trigger is not the same as previous or - // no next trigger - TriggerTime nextTrigger = null; - if(!triggerQueue.isEmpty()) nextTrigger = triggerQueue.peek(); - while((!triggerQueue.isEmpty()) && (nextTrigger.getTriggerTime() == trigger.getTriggerTime())) { - triggerList.add(nextTrigger); - triggerQueue.poll(); - if(!triggerQueue.isEmpty()) nextTrigger = triggerQueue.peek(); - } - } - - triggers++; - - // Make a new LCSim event. - int triggerEventNumber = event.getEventNumber() - ((int) Math.floor((getCurrentTime() - trigger.getTriggerTime()) / 2.0)); - EventHeader lcsimEvent = new BaseLCSimEvent(DatabaseConditionsManager.getInstance().getRun(), - triggerEventNumber, event.getDetectorName(), (long) 4 * (Math.round(trigger.getTriggerTime() / 4)), false); - - // 2016 MC only process one trigger, and no TS bank is stored - // 2019 MC can process multi-trigger, and TS bank is stored - if(!trigger.getTriggerType().equals("noSet")) { - List ts_list = new ArrayList(); - TSGenericObject tsBank = new TSGenericObject(); - int[] tsValues = new int[8]; - BitSet bits = new BitSet(32); - for(TriggerTime tri : triggerList) { - String triggerType = tri.getTriggerType(); - if(triggerType.equals(TriggerDriver.SINGLES0)) { - String topBot = tri.getTopBotStat(); - if(topBot.equals(TriggerDriver.TOP)) bits.set(0); - else if(topBot.equals(TriggerDriver.BOT)) bits.set(4); - else if(topBot.equals(TriggerDriver.TOPBOT)){ - bits.set(0); - bits.set(4); - } - } - else if(triggerType.equals(TriggerDriver.SINGLES1)) { - String 
topBot = tri.getTopBotStat(); - if(topBot.equals(TriggerDriver.TOP)) bits.set(1); - else if(topBot.equals(TriggerDriver.BOT)) bits.set(5); - else if(topBot.equals(TriggerDriver.TOPBOT)){ - bits.set(1); - bits.set(5); - } - } - else if(triggerType.equals(TriggerDriver.SINGLES2)) { - String topBot = tri.getTopBotStat(); - if(topBot.equals(TriggerDriver.TOP)) bits.set(2); - else if(topBot.equals(TriggerDriver.BOT)) bits.set(6); - else if(topBot.equals(TriggerDriver.TOPBOT)){ - bits.set(2); - bits.set(6); - } - } - else if(triggerType.equals(TriggerDriver.SINGLES3)) { - String topBot = tri.getTopBotStat(); - if(topBot.equals(TriggerDriver.TOP)) bits.set(3); - else if(topBot.equals(TriggerDriver.BOT)) bits.set(7); - else if(topBot.equals(TriggerDriver.TOPBOT)){ - bits.set(3); - bits.set(7); - } - } - else if(triggerType.equals(TriggerDriver.PAIR0)) bits.set(8); - else if(triggerType.equals(TriggerDriver.PAIR1)) bits.set(9); - else if(triggerType.equals(TriggerDriver.PAIR2)) bits.set(10); - else if(triggerType.equals(TriggerDriver.PAIR3)) bits.set(11); - else if(triggerType.equals(TriggerDriver.PULSER)) bits.set(15); - else if(triggerType.equals(TriggerDriver.FEE)) { - String topBot = tri.getTopBotStat(); - if(topBot.equals(TriggerDriver.TOP)) bits.set(18); - else if(topBot.equals(TriggerDriver.BOT)) bits.set(19); - else if(topBot.equals(TriggerDriver.TOPBOT)){ - bits.set(18); - bits.set(19); - } - } - } - - tsValues[0] = EvioEventConstants.TS_BANK_TAG; - - if(!bits.isEmpty()) { - tsValues[5] = (int)bits.toLongArray()[0]; - tsValues[6] = (int)bits.toLongArray()[0]; - } - else { - tsValues[5] = 0; - tsValues[6] = 0; - } - - // Filling the generic objects with the integer array - tsBank.setValues(tsValues); - - // Adding the generic object to the list - ts_list.add(tsBank); - lcsimEvent.put("TSBank", ts_list, TSGenericObject.class, 0); - } - - - // Calculate the readout window time range. 
This is - // used for any production driver that does not have - // a manually specified output range. - double startTime = trigger.getTriggerTime() - triggerTimeDisplacement; - double endTime = startTime + readoutWindow; - - logger.finer("Trigger Time: " + trigger.getTriggerTime()); - logger.finer("Default Time Range: " + startTime + " - " + endTime); - - // All readout output is initially stored in a single - // object. This allows the readout from multiple - // drivers to be merged, if needed, and also prevents - // duplicate instances of an object from being - // written. - Map> triggeredDataMap = new HashMap>(); - - // Write out the writable collections into the event. - for(ManagedLCIOData collectionData : collectionMap.values()) { - // Ignore any collections that are not set to be persisted. - if(!collectionData.getCollectionParameters().isPersistent()) { - continue; - } - - // Get the local start and end times. A driver - // may manually specify an amount of time before - // and after the trigger time which should be - // output. If this is the case, use it instead of - // the time found through use of the readout - // window/trigger time displacement calculation. - double localStartTime = startTime; - if(!Double.isNaN(collectionData.getCollectionParameters().getWindowBefore())) { - localStartTime = trigger.getTriggerTime() - collectionData.getCollectionParameters().getWindowBefore(); - } - - double localEndTime = endTime; - if(!Double.isNaN(collectionData.getCollectionParameters().getWindowAfter())) { - localEndTime = trigger.getTriggerTime() + collectionData.getCollectionParameters().getWindowAfter(); - } - - // Get the object data for the time range. - addDataToMap(collectionData.getCollectionParameters(), localStartTime, localEndTime, triggeredDataMap); - } - - // Write out any special on-trigger collections into - // the event as well. 
These are collated so that if - // more than one driver contributes to the same - // collection, they will be properly merged. - for(ReadoutDriver driver : driverSet) { - // Get the special collection(s) from the current - // driver, if it exists. - Collection> onTriggerData = driver.getOnTriggerData(trigger.getTriggerTime()); - - // If there are special collections, write them. - if(onTriggerData != null) { - for(TriggeredLCIOData triggerData : onTriggerData) { - addDataToMap(triggerData, triggerData.getCollectionParameters().getObjectType(), triggeredDataMap); - } - } - } - - // Create the dummy trigger bank data and store it. - TriggeredLCIOData triggerBankData = new TriggeredLCIOData(triggerBankParams); - triggerBankData.getData().add(new BaseTriggerData(new int[8])); - addDataToMap(triggerBankData, triggerBankData.getCollectionParameters().getObjectType(), triggeredDataMap); - - // Readout timestamps should be generated for both - // the "system" and the trigger. This corresponds to - // the simulation time at which the trigger occurred. - // Note that there is a "trigger delay" parameter in - // the old readout, but this does not exist in the - // new system, so both timestamps are the same. - - // Calculate the simulation trigger time. 
- double simTriggerTime = trigger.getTriggerTime() + triggerTimeDisplacementMap.get(trigger.getTriggeringDriver()).doubleValue(); - ReadoutTimestamp systemTimestamp = new ReadoutTimestamp(ReadoutTimestamp.SYSTEM_TRIGGERBITS, simTriggerTime); - ReadoutTimestamp triggerTimestamp = new ReadoutTimestamp(ReadoutTimestamp.SYSTEM_TRIGGERTIME, simTriggerTime); - LCIOCollectionFactory.setCollectionName(ReadoutTimestamp.collectionName); - LCIOCollection timestampCollection = LCIOCollectionFactory.produceLCIOCollection(ReadoutTimestamp.class); - TriggeredLCIOData timestampData = new TriggeredLCIOData(timestampCollection); - timestampData.getData().add(systemTimestamp); - timestampData.getData().add(triggerTimestamp); - addDataToMap(timestampData, timestampData.getCollectionParameters().getObjectType(), triggeredDataMap); - - // Store all of the data collections. - for(TriggeredLCIOData triggerData : triggeredDataMap.values()) { - storeCollection(triggerData, lcsimEvent); - } - - // Write the event to the output file. - try { outputWriter.write(lcsimEvent); } - catch(IOException e) { - e.printStackTrace(); - throw new RuntimeException(); - } - } - } - - // Remove all data from the buffer that occurs before the max - // buffer length cut-off. - for(ManagedLCIOData data : collectionMap.values()) { - while(!data.getData().isEmpty() && (data.getData().getFirst().getTime() < (getCurrentTime() - 500))) { - data.getData().removeFirst(); - } - } - - // Increment the current time. - currentTime += BEAM_BUNCH_SIZE; - } - - /** - * Adds a new set of data objects to the data manager at the time - * specified. - * @param collectionName - The collection name to which the data - * should be added. - * @param dataTime - The truth time at which the data objects - * occurred. This represents the time of the object, corrected - * for time displacement due to buffering on processing on the - * part of the production driver. - * @param data - The data to add. 
- * @param dataType - The class type of the data objects. - * @throws IllegalArgumentException Occurs if either the - * collection specified does not exist, or if the object type of - * the data objects does not match the object type of the data in - * the collection. - * @param - Specifies the class type of the data to be added - * to the collection. - */ - public static final void addData(String collectionName, double dataTime, Collection data, Class dataType) { - // Validate that the collection has been registered. - if(!collectionMap.containsKey(collectionName)) { - throw new IllegalArgumentException("Error: Collection \"" + collectionName + "\" has not been registered."); - } - - // Get the collection data object. - ManagedLCIOData collectionData = collectionMap.get(collectionName); - - // Validate that the data type is correct. - if(!collectionData.getCollectionParameters().getObjectType().isAssignableFrom(dataType)) { - throw new IllegalArgumentException("Error: Saw data type \"" + dataType.getSimpleName() + "\" but expected data type \"" - + collectionData.getCollectionParameters().getObjectType().getSimpleName() + "\" instead."); - } - - // If the data is empty, then there is no need to add it to - // the buffer. - if(!data.isEmpty()) { - // Add the new data to the data buffer. - double time = Double.isNaN(dataTime) ? currentTime - collectionData.getCollectionParameters().getGlobalTimeDisplacement() : dataTime; - LinkedList> dataBuffer = collectionData.getData(); - dataBuffer.add(new TimedList(time, data)); - } - } - - /** - * Adds a new set of data objects to the data manager at a time - * calculated based on the current simulation time corrected by - * the total time offset of the collection. - * @param collectionName - The collection name to which the data - * should be added. - * @param data - The data to add. - * @param dataType - The class type of the data objects. 
- * @throws IllegalArgumentException Occurs if either the - * collection specified does not exist, or if the object type of - * the data objects does not match the object type of the data in - * the collection. - * @param - Specifies the class type of the data to be added - * to the collection. - */ - public static final void addData(String collectionName, Collection data, Class dataType) { - addData(collectionName, Double.NaN, data, dataType); - } - - /** - * Checks whether or not a collection has been populated up to - * the indicated time. - * @param collectionName - The collection to check. - * @param time - The time at which the collection should be - * filled. - * @return Returns true if the collection has data - * generated up to at least the specified time, and - * false if it does not. - */ - public static final boolean checkCollectionStatus(String collectionName, double time) { - // Verify that the requested collection exists. - if(!collectionMap.containsKey(collectionName)) { - throw new IllegalArgumentException("Error: Collection \"" + collectionName + "\" is not a registered collection."); - } - - // Otherwise, check if enough time has passed for the driver - // which controls to the collection to have produced output - // for the requested time period. - return time <= getCurrentTime() - collectionMap.get(collectionName).getCollectionParameters().getGlobalTimeDisplacement(); - } - - /** - * Gets the length in nanoseconds of a single event (beam bunch). - * @return Returns the length in ns of a single beam bunch. - */ - public static final double getBeamBunchSize() { - return BEAM_BUNCH_SIZE; - } - - /** - * Gets the LCIO collection parameters for a collection. - * @param collectionName - The name of the collection. - * @param objectType - The data type of the collection. - * @return Returns the collection parameters. 
- */ - @SuppressWarnings("unchecked") - public static final LCIOCollection getCollectionParameters(String collectionName, Class objectType) { - // Verify that the requested collection actually exists. - if(!collectionMap.containsKey(collectionName)) { - throw new IllegalArgumentException("Error: Collection \"" + collectionName + "\" does not exist."); - } - - // Get the collection and check that it is of the appropriate - // parameterized type. - LCIOCollection collection = collectionMap.get(collectionName).getCollectionParameters(); - if(collection.getObjectType() != objectType) { - throw new IllegalArgumentException("Error: Collection \"" + collectionName + "\" is of type " + collection.getObjectType().getSimpleName() - + " while object type " + objectType.getSimpleName() + " was requested."); - } - - // Return the collection parameters. - // NOTE: This type case is safe, since it is verified above - // that the collection object is of the same class type - // as the parameterized type. - return (LCIOCollection) collection; - } - - /** - * Gets the current simulation time in nanoseconds. - * @return Returns the simulation time in nanoseconds. - */ - public static final double getCurrentTime() { - return currentTime; - } - - /** - * Gets a collection of data objects from a collection within the - * time range specified. - * @param startTime - The (inclusive) start of the time range. - * @param endTime The (exclusive) end of the time range. - * @param collectionName - The name of the collection. - * @param objectType - The class type of the data stored in the - * collection. - * @return Returns the data in the specified time range in the - * data collection in a {@link java.util.List List}. - * @param - Specifies the class type of the data stored in - * the collection. 
- */ - public static final Collection getData(double startTime, double endTime, String collectionName, Class objectType) { - return getDataList(startTime, endTime, collectionName, objectType); - } - - /** - * Gets the {@link org.lcsim.geometry.IDDecoder IDDecoder} that - * is used for the indicated managed collection, if it exists. - * @param collectionName - The collection to which the decoder - * should correspond. - * @return Returns the decoder for the collection, if it exists, - * and null otherwise. - */ - public static final IDDecoder getIDDecoder(String collectionName) { - // Verify that the requested collection actually exists. - if(!collectionMap.containsKey(collectionName)) { - throw new IllegalArgumentException("Error: Collection \"" + collectionName + "\" does not exist."); - } - - // Get the collection and obtain the ID decoder, if possible. - // If it does not exist, then leave it as a value of null. - LCIOCollection collection = collectionMap.get(collectionName).getCollectionParameters(); - IDDecoder decoder = null; - try { decoder = collection.getProductionDriver().getIDDecoder(collectionName); } - catch(UnsupportedOperationException e) { } - - // Return the decoder. - return decoder; - } - - /** - * Gets the default size of the readout window. - * @return Returns the default size of the readout window in - * units of nanoseconds. - */ - public static final int getReadoutWindow() { - return readoutWindow; - } - - /** - * Gets the total amount of time by which a collection is - * displaced between the actual truth data's occurrence in the - * simulation, and the time at which the object is actually - * produced. This includes both the time displacement introduced - * by the collection's production driver as well as displacement - * introduced by any preceding drivers that serve as input for - * the production driver. - * @param collectionName - The name of the collection. - * @return Returns the total time displacement in nanoseconds. 
- */ - public static final double getTotalTimeDisplacement(String collectionName) { - if(collectionMap.containsKey(collectionName)) { - return collectionMap.get(collectionName).getCollectionParameters().getGlobalTimeDisplacement(); - } else { - throw new IllegalArgumentException("Error: Collection \"" + collectionName + "\" does not exist."); - } - } - - /** - * Gets the time displacement between when a trigger occurs, and - * when the triggered data is actually written out. - * @return Returns the trigger delay in units of nanoseconds. - */ - public static final double getTriggerDelay() { - return bufferTotal; - } - - /** - * Gets the time by which the trigger is offset in the readout - * window. - * @return Returns the trigger offset in units of nanoseconds. - */ - public static final double getTriggerOffset() { - return triggerTimeDisplacement; - } - - /** - * Adds a managed collection to the data manager. All collections - * which serve as either input or output from a {@link - * org.hps.readout.ReadoutDriver ReadoutDriver} are required to - * be registered and managed by the data manager. On-trigger - * special collections should not be registered. - * @param params - An object describing the collection - * parameters. - * @param persistent - Sets whether this collection should be - * written out to the readout LCIO file. - * @param - Specifies the class type of the data stored by - * the collection. - */ - public static final void registerCollection(LCIOCollection params, boolean persistent) { - registerCollection(params, persistent, Double.NaN, Double.NaN); - } - - /** - * Adds a managed collection to the data manager. All collections - * which serve as either input or output from a {@link - * org.hps.readout.ReadoutDriver ReadoutDriver} are required to - * be registered and managed by the data manager. On-trigger - * special collections should not be registered. - * @param params - An object describing the collection - * parameters. 
- * @param persistent - Sets whether this collection should be - * written out to the readout LCIO file. - * @param readoutWindowBefore - Defines a custom period of time - * before the trigger time in which all objects will be output to - * the output LCIO file. - * @param readoutWindowAfter - Defines a custom period of time - * after the trigger time in which all objects will be output to - * the output LCIO file. - * @param - Specifies the class type of the data stored by - * the collection. - */ - public static final void registerCollection(LCIOCollection params, boolean persistent, double readoutWindowBefore, double readoutWindowAfter) { - // Make sure that all arguments are defined. - if(params.getCollectionName() == null) { - throw new IllegalArgumentException("Error: Collection name must be defined."); - } - if(params.getObjectType() == null) { - throw new IllegalArgumentException("Error: Collection object class must be defined."); - } - if(params.getProductionDriver() == null) { - throw new IllegalArgumentException("Error: Production driver must be defined."); - } - - // There should only be one collection for a given name. - if(collectionMap.containsKey(params.getCollectionName())) { - throw new IllegalArgumentException("Collection \"" + params.getCollectionName() + "\" of object type " - + params.getObjectType().getSimpleName() + " already exists."); - } - - // Create a collection data object. 
- double timeDisplacement = getTotalTimeDisplacement(params.getCollectionName(), params.getProductionDriver()); - LCIOCollectionFactory.setParams(params); - LCIOCollectionFactory.setGlobalTimeDisplacement(timeDisplacement); - LCIOCollectionFactory.setPersistent(persistent); - LCIOCollectionFactory.setWindowAfter(readoutWindowAfter); - LCIOCollectionFactory.setWindowBefore(readoutWindowBefore); - ManagedLCIOCollection managedParams = LCIOCollectionFactory.produceManagedLCIOCollection(params.getObjectType()); - ManagedLCIOData collectionData = new ManagedLCIOData(managedParams); - collectionMap.put(params.getCollectionName(), collectionData); - - // Store the readout driver in the driver set. - driverSet.add(params.getProductionDriver()); - - logger.config("Registered collection \"" + managedParams.getCollectionName() + "\" of class type " - + managedParams.getObjectType().getSimpleName() + "."); - - StringBuffer detailsBuffer = new StringBuffer(); - detailsBuffer.append("\tCollection Name :: " + params.getCollectionName()); - detailsBuffer.append("\tFlags :: " + Integer.toHexString(params.getFlags())); - detailsBuffer.append("\tObject Type :: " + params.getObjectType().getSimpleName()); - detailsBuffer.append("\tReadout Name :: " + params.getReadoutName()); - detailsBuffer.append("\tProduction Driver :: " + params.getProductionDriver().getClass().getSimpleName()); - logger.finer(nl + detailsBuffer.toString()); - } - - /** - * Registers a {@link org.hps.readout.ReadoutDriver - * ReadoutDriver} with the data manager. All readout drivers must - * be registered in order for their on-trigger special data to be - * added to the output event. - * @param productionDriver - The readout driver to register. - */ - public static final void registerReadoutDriver(ReadoutDriver productionDriver) { - // Trigger drivers are registered differently. 
- if(productionDriver instanceof TriggerDriver) { - logger.warning(nl + "Attempted to register TriggerDriver \"" + productionDriver.getClass().getSimpleName() + "\" as a readout driver." - + nl + " Trigger drivers are registered via the method \"registerTrigger(TriggerDriver)\"." - + nl + " Ignoring request."); - return; - } - - // Add the readout driver. - driverSet.add(productionDriver); - logger.config("Registered driver: " + productionDriver.getClass().getSimpleName()); - } - - /** - * Registers a trigger driver with the data manager. - * @param triggerDriver - The trigger driver to register. - */ - public static final void registerTrigger(TriggerDriver triggerDriver) { - // Get the total time displacement for the trigger driver. - double timeDisplacement = getTotalTimeDisplacement("", triggerDriver); - - // Store the time displacement in the trigger driver map. - triggerTimeDisplacementMap.put(triggerDriver, timeDisplacement); - logger.config("Registered trigger: " + triggerDriver.getClass().getSimpleName()); - } - - /** - * Changes the "readout name" parameter for a collection, while - * retaining all other parameters and stored data. - * @param collectionName - The name of the collection to modify. - * @param objectType - The object type of the collection. - * @param newReadoutName - The new name for the "readout name" - * parameter. - * @param - The object type of the data stored in the - * collection that is to be modified. - */ - public static final void updateCollectionReadoutName(String collectionName, Class objectType, String newReadoutName) { - // Get the collection. - if(!collectionMap.containsKey(collectionName)) { - throw new IllegalArgumentException("Error: Collection \"" + collectionName + "\" does not exist."); - } - ManagedLCIOData oldData = collectionMap.get(collectionName); - - // Make a new managed LCIO collection with the new readout. 
- LCIOCollectionFactory.setParams(oldData.getCollectionParameters()); - LCIOCollectionFactory.setReadoutName(newReadoutName); - ManagedLCIOCollection newParams = LCIOCollectionFactory.produceManagedLCIOCollection(objectType); - - // Create a new managed LCIO data object and transfer all the - // data from the old object to it. - ManagedLCIOData newData = new ManagedLCIOData(newParams); - for(TimedList oldList : oldData.getData()) { - newData.getData().add(oldList); - } - - // Put the new data list into the map. - collectionMap.put(collectionName, newData); - } - - /** - * Indicates that the specified driver saw a trigger and readout - * should occur. - * @param driver - The triggering driver. - * @throws IllegalArgumentException Occurs if the argument - * triggering driver is not registered as a trigger driver with - * the data manager. - */ - static final void sendTrigger(TriggerDriver driver) { - // Check that the triggering driver is registered as a - // trigger driver. - if(!triggerTimeDisplacementMap.containsKey(driver)) { - throw new IllegalArgumentException("Error: Driver \"" + driver.getClass().getSimpleName() + "\" is not a registered trigger driver."); - } - - // Calculate the trigger and readout times. - double triggerTime = getCurrentTime() - triggerTimeDisplacementMap.get(driver); - - // Add the trigger to the trigger queue. - triggerQueue.add(new TriggerTime(triggerTime, driver)); - logger.finer("Added trigger to queue with trigger time " + triggerTime + " and readout time " + (triggerTime + bufferTotal) + " from driver " - + driver.getClass().getSimpleName() + "."); - } - - /** - * Indicates that the specified driver saw a trigger and readout - * should occur. - * @param driver - The triggering driver. - * @param triggerType - trigger type. - * @throws IllegalArgumentException Occurs if the argument - * triggering driver is not registered as a trigger driver with - * the data manager. 
- */ - static final void sendTrigger(TriggerDriver driver, String triggerType) { - // Check that the triggering driver is registered as a - // trigger driver. - if(!triggerTimeDisplacementMap.containsKey(driver)) { - throw new IllegalArgumentException("Error: Driver \"" + driver.getClass().getSimpleName() + "\" is not a registered trigger driver."); - } - - // Calculate the trigger and readout times. - double triggerTime = getCurrentTime() - triggerTimeDisplacementMap.get(driver); - - // Add the trigger to the trigger queue. - triggerQueue.add(new TriggerTime(triggerTime, triggerType, driver)); - logger.finer("Added trigger to queue with trigger time " + triggerTime + " and readout time " + (triggerTime + bufferTotal) + " from driver " - + driver.getClass().getSimpleName() + "."); - } - - /** - * Indicates that the specified driver saw a trigger and readout - * should occur. - * @param driver - The triggering driver. - * @param triggerType - trigger type. - * @throws IllegalArgumentException Occurs if the argument - * triggering driver is not registered as a trigger driver with - * the data manager. - */ - static final void sendTrigger(TriggerDriver driver, String triggerType, String topBot) { - // Check that the triggering driver is registered as a - // trigger driver. - if(!triggerTimeDisplacementMap.containsKey(driver)) { - throw new IllegalArgumentException("Error: Driver \"" + driver.getClass().getSimpleName() + "\" is not a registered trigger driver."); - } - - // Calculate the trigger and readout times. - double triggerTime = getCurrentTime() - triggerTimeDisplacementMap.get(driver); - - // Add the trigger to the trigger queue. 
- triggerQueue.add(new TriggerTime(triggerTime, triggerType, topBot, driver)); - logger.finer("Added trigger to queue with trigger time " + triggerTime + " and readout time " + (triggerTime + bufferTotal) + " from driver " - + driver.getClass().getSimpleName() + "."); - } - - /** - * Adds a data collection corresponding to a given parameter set - * to the data map. If there is already data existing under the - * same collection, it is then merged without duplicating any - * objects. - * @param params - The collection parameters for the data. - * @param readoutData - The data to add. - * @param triggeredDataMap - The data map into which the data - * collection should be added. - */ - @SuppressWarnings("unchecked") - private static final void addDataToMap(LCIOCollection params, Collection readoutData, Map> triggeredDataMap) { - // Check and see if an output collection already exists for - // this parameter set. If so, use it; otherwise, make a new - // entry for it. - TriggeredLCIOData untypedData = triggeredDataMap.get(params.getCollectionName()); - TriggeredLCIOData typedData = null; - if(untypedData == null) { - typedData = new TriggeredLCIOData(params); - triggeredDataMap.put(params.getCollectionName(), typedData); - } else { - // Verify that the collection parameters are the same. - if(untypedData.getCollectionParameters().equals(params)) { - // Note: This cast is safe; if the parameters objects - // are the same, then the object sets are necessarily - // of the same object type. - typedData = (TriggeredLCIOData) untypedData; - } else { - throw new RuntimeException("Error: Found multiple collections of name \"" + params.getCollectionName() + "\", but of differing definitions."); - } - } - - // Add the readout data to the collection data list. - typedData.getData().addAll(readoutData); - } - - /** - * Adds data stored in the collection defined by the parameters - * object within the given time range to the data map. 
If there - * is already data existing under the same collection, it is then - * merged without duplicating any objects. - * @param params - The parameters for the collection to add. - * @param startTime - The start of the time range within the data - * buffer from which data should be drawn. - * @param endTime - The end of the time range within the data - * buffer from which data should be drawn. - * @param triggeredDataMap - The data map into which the data - * collection should be added. - */ - private static final void addDataToMap(LCIOCollection params, double startTime, double endTime, Map> triggeredDataMap) { - // Get the readout data objects. - List triggerData = getDataList(startTime, endTime, params.getCollectionName(), params.getObjectType()); - - // Pass the readout data to the merging method. - addDataToMap(params, triggerData, triggeredDataMap); - } - - /** - * Adds data stored in a triggered collection object to the data - * map. If there is already data existing under the same - * collection, it is then merged without duplicating any objects. - * @param dataList - The collection data to be added. - * @param objectType - the object type of the collection data. - * @param triggeredDataMap - The data map into which the data - * collection should be added. - */ - private static final void addDataToMap(TriggeredLCIOData dataList, Class objectType, Map> triggeredDataMap) { - // Check that the parameters object is the same object type - // as is specified. - if(dataList.getCollectionParameters().getObjectType() != objectType) { - throw new IllegalArgumentException("Error: Can not process class type " + dataList.getCollectionParameters().getObjectType().getSimpleName() - + " as class type " + objectType.getSimpleName()); - } else { - // Note: This is safe - the above check requires that the - // object type be the parameterized type. 
- @SuppressWarnings("unchecked") - TriggeredLCIOData typedDataList = (TriggeredLCIOData) dataList; - Set triggerData = typedDataList.getData(); - addDataToMap(typedDataList.getCollectionParameters(), triggerData, triggeredDataMap); - } - } - - /** - * Gets a list of data objects from a collection within the time - * range specified. - * @param startTime - The (inclusive) start of the time range. - * @param endTime The (exclusive) end of the time range. - * @param collectionName - The name of the collection. - * @param objectType - The class type of the data stored in the - * collection. - * @return Returns the data in the specified time range in the - * data collection in a {@link java.util.List List}. - * @param - Specifies the class type of the data stored in - * the collection. - */ - private static final List getDataList(double startTime, double endTime, String collectionName, Class objectType) { - // Get the collection data. - ManagedLCIOData collectionData = collectionMap.get(collectionName); - - // Verify that the a collection of the indicated name exists - // and that it is the appropriate object type. - if(collectionData != null) { - if(!objectType.isAssignableFrom(collectionData.getCollectionParameters().getObjectType())) { - throw new IllegalArgumentException("Error: Expected object type " + objectType.getSimpleName() + " for collection \"" + collectionName - + ",\" but found object type " + collectionData.getCollectionParameters().getObjectType().getSimpleName() + "."); - } - } else { - throw new IllegalArgumentException("Error: Collection \"" + collectionName + "\" does not exist."); - } - - // Throw an alert if the earliest requested time precedes the - // earliest buffered time, and similarly for the latest time. - LinkedList> dataLists = collectionData.getData(); - - // Iterate through the data and collect all entries that have - // an associated truth time within the given time range. The - // lower bound is inclusive, the upper bound is exclusive. 
- List outputList = new ArrayList(); - for(TimedList dataList : dataLists) { - if(dataList.getTime() >= startTime && dataList.getTime() < endTime) { - // Add the items from the list to the output list. - for(Object o : dataList) { - if(objectType.isAssignableFrom(o.getClass())) { - outputList.add(objectType.cast(o)); - } else { - throw new ClassCastException("Error: Unexpected object of type " + o.getClass().getSimpleName() + " in collection \"" - + collectionName + ".\""); - } - } - } - } - - // Return the collected items. - return outputList; - } - - /** - * Calculates the total time displacement of a collection based - * on its production driver, and the time displacements of the - * input collections from which it is produced. This is processed - * recursively, so all time displacements in the production chain - * of a collection are accounted for. - * @param collectionName - The name of the collection. - * @param productionDriver - The driver which produces the - * collection. - * @return Returns the total time displacement for the collection - * in units of nanoseconds. - */ - private static final double getTotalTimeDisplacement(String collectionName, ReadoutDriver productionDriver) { - // Make sure that there are no circular dependencies. - validateDependencies(collectionName, productionDriver, new HashSet()); - - // The total time displacement is the displacement of the - // dependent collection with the largest displacement plus - // the local time displacement of the production driver. - double baseDisplacement = 0.0; - for(String dependency : productionDriver.getDependencies()) { - // All dependencies must already be registered. Check - // that it is. 
- double dependencyDisplacement = 0.0; - if(collectionMap.containsKey(dependency)) { - dependencyDisplacement = collectionMap.get(dependency).getCollectionParameters().getGlobalTimeDisplacement(); - } else { - throw new IllegalArgumentException("Error: Collection \"" + dependency + "\" has not been registered."); - } - - // Select the largest value. - baseDisplacement = Math.max(baseDisplacement, dependencyDisplacement); - } - - // Return the sum of the largest base displacement and the - // production driver. - return baseDisplacement + productionDriver.getTimeDisplacement(); - } - - /** - * Writes an entire {@link org.hps.readout.ReadoutDriver - * ReadoutDriver} on-trigger data collection to the specified - * output event. - * @param collectionData - The on-trigger readout data. - * @param event - The output event. - * @param - Specifies the class type of the data that is to be - * written to the output event. - */ - private static final void storeCollection(TriggeredLCIOData collectionData, EventHeader event) { - storeCollection(collectionData.getCollectionParameters().getCollectionName(), collectionData.getCollectionParameters().getObjectType(), - collectionData.getCollectionParameters().getFlags(), collectionData.getCollectionParameters().getReadoutName(), - collectionData.getData(), event); - } - - /** - * Writes the specified data to the output event. - * @param collectionName - The name of the output collection. - * @param objectType - The class of the output collection data - * objects. - * @param flags - Any LCIO flags which apply to the data. - * @param readoutName - The readout name for the data, if it is - * needed. null should be used if a readout name is - * not required. - * @param collectionData - A parameterized {@link - * java.util.Collection Collection} containing the data that is - * to be written. - * @param event - The event into which the data is to be written. 
- * @param - Specifies the class type of the data that is to be - * written to the output event. - */ - private static final void storeCollection(String collectionName, Class objectType, int flags, String readoutName, - Collection collectionData, EventHeader event) { - // The input collection must be a list. If it already is, - // just use it directly. Otherwise, copy the contents into an - // appropriately parameterized list. - List dataList; - if(collectionData instanceof List) { - dataList = (List) collectionData; - } else { - dataList = new ArrayList(collectionData.size()); - dataList.addAll(collectionData); - } - - // Place the data into the LCIO event. - if(readoutName == null) { - event.put(collectionName, dataList, objectType, flags); - } else { - event.put(collectionName, dataList, objectType, flags, readoutName); - } - - logger.finer(String.format("Output %d objects of type %s to collection \"%s\".", dataList.size(), objectType.getSimpleName(), collectionName)); - } - - /** - * Checks that the dependencies of a collection are valid. This - * consists of checking that any dependencies are registered with - * the data management driver and also that there are no circular - * dependencies present. - * @param collectionName - The name of the collection to check. - * @param productionDriver - The production driver of the - * collection to check. - * @param dependents - A set containing all of the collections - * which depend on this driver in the chain. Note that for the - * first call, this should be an empty set. - */ - private static final void validateDependencies(String collectionName, ReadoutDriver productionDriver, Set dependents) { - // Add the current driver to the list of dependents. - dependents.add(collectionName); - - // Check that none of the dependencies of the current driver - // are also dependencies of a driver higher in the chain. - for(String dependency : productionDriver.getDependencies()) { - // The dependency must be registered. 
- if(!collectionMap.containsKey(dependency)) { - throw new IllegalArgumentException("Error: Collection \"" + dependency + "\" has not been registered."); - } - - // Get the collection data for the dependency. - ManagedLCIOData collectionData = collectionMap.get(dependency); - - // Check that this dependency does not depend on the - // higher driver. - for(String dependent : dependents) { - if(collectionData.getCollectionParameters().getProductionDriver().getDependencies().contains(dependent)) { - throw new IllegalStateException("Error: Collection \"" + dependency + "\" depends on collection \"" + dependent - + ",\" but collection \"" + dependent + "\" also depends of collection \"" + dependency + ".\""); - } - } - - // If there are no detected circular dependencies, then - // perform the same check on the dependencies of this - // dependency. - Set dependencySet = new HashSet(); - dependencySet.addAll(dependents); - validateDependencies(dependency, collectionData.getCollectionParameters().getProductionDriver(), dependencySet); - } - } - - /** - * Adds the argument particle and all of its direct parents to - * the particle set. - * @param particle - The base particle. - * @param particleSet - The set that is to contain the full tree - * of particles. - */ - public static final void addParticleParents(MCParticle particle, Set particleSet) { - // Add the particle itself to the set. - particleSet.add(particle); - - // If the particle has parents, run the same method for each - // parent. - if(!particle.getParents().isEmpty()) { - for(MCParticle parent : particle.getParents()) { - addParticleParents(parent, particleSet); - } - } - } - - /** - * Sets the output file name for the triggered data file. - * @param filepath - The file path for the output file. - */ - public static final void setOutputFile(String filepath) { - outputFileName = filepath; - } - - /** - * Sets the default size of the readout window, in units of - * nanoseconds. 
Note that this can be overridden by specific - * drivers. - * @param nanoseconds - The length of the default readout window. - */ - public static final void setReadoutWindow(int nanoseconds) { - readoutWindow = nanoseconds; - } -} +package org.hps.readout; + +import java.io.File; +import java.io.IOException; +import java.util.ArrayList; +import java.util.BitSet; +import java.util.Collection; +import java.util.HashMap; +import java.util.HashSet; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.PriorityQueue; +import java.util.Set; +import java.util.logging.Logger; + +import org.hps.conditions.database.DatabaseConditionsManager; +import org.hps.record.evio.EvioEventConstants; +import org.hps.record.triggerbank.TSGenericObject; +import org.hps.readout.util.TimedList; +import org.hps.readout.util.TriggerTime; +import org.hps.readout.util.collection.LCIOCollection; +import org.hps.readout.util.collection.LCIOCollectionFactory; +import org.hps.readout.util.collection.ManagedLCIOCollection; +import org.hps.readout.util.collection.ManagedLCIOData; +import org.hps.readout.util.collection.TriggeredLCIOData; +import org.hps.record.triggerbank.BaseTriggerData; +import org.lcsim.event.EventHeader; +import org.lcsim.event.GenericObject; +import org.lcsim.event.MCParticle; +import org.lcsim.event.base.BaseLCSimEvent; +import org.lcsim.geometry.IDDecoder; +import org.lcsim.lcio.LCIOWriter; +import org.lcsim.util.Driver; + +/** + * Class ReadoutDataManager is the central management + * class for the HPS readout chain. It is responsible for tracking + * most LCSim collection data, for syncing readout data production + * drivers and their output, for passing managed data objects to + * drivers as input, for managing triggers, and for writing out data. + *

+ * More information on how a readout driver should interface + */ +public class ReadoutDataManager extends Driver { + /** + * Defines the default size of the readout window in units of + * nanoseconds. + */ + private static int readoutWindow = 200; + /** + * Defines the name of the output file for the run. + */ + private static String outputFileName = null; + /** + * Defines where the trigger time should occur within the default + * readout window. For instance, a value of t means + * that a period of time equal to t will be included + * before the trigger time, and a period of time equal to + * readoutWindow - t will be included after it. + */ + private static double triggerTimeDisplacement = 50; + /** + * Defines the length of an event in units of nanoseconds. + */ + private static final double BEAM_BUNCH_SIZE = 2.0; + /** + * Tracks the current simulation time in units of nanoseconds. + */ + private static double currentTime = 0.0; + /** + * Tracks all registered readout drivers. + */ + private static final Set driverSet = new HashSet(); + /** + * Tracks all data collections which are managed by the readout + * manager as well as their properties. + */ + private static final Map> collectionMap = new HashMap>(); + /** + * Tracks the time displacement for trigger drivers. + */ + private static final Map triggerTimeDisplacementMap = new HashMap(); + /** + * Stores trigger requests from trigger drivers until enough time + * has passed to fully buffer the necessary readout data. + */ + private static final PriorityQueue triggerQueue = new PriorityQueue(); + /** + * A writer for writing readout events to an output LCIO file. + */ + private static LCIOWriter outputWriter = null; + /** + * Tracks the total amount of time that must be buffered to allow + * for readout to occur. + */ + private static double bufferTotal = 0.0; + /** + * The total number of triggers seen. 
+ */ + private static int triggers = 0; + /** + * The delay between when a trigger occurs, and when readout is + * performed. + */ + private static double triggerDelay = 0.0; + /** + * sets the time passed between LCIO events. + * Used for running MC without putting bunches + * between "signal" events. + * For running of MC-generated beam, set this to 1 + * For pulser-data overlay, set to 250 + * (250*2ns = 500ns empty time) + */ + private static int effectiveBunches = 1; + /** + * set buffer time to 0 + * used for pulser-data overlay MC readout. + * set to false for MC-generated beam + */ + private static boolean zeroBuffer = false; + /** + * Collection parameters for the dummy trigger bank object. + */ + private static LCIOCollection triggerBankParams = null; + + private static final String nl = String.format("%n"); + private static final Logger logger = Logger.getLogger(ReadoutDataManager.class.getSimpleName()); + + @Override + public void startOfData() { + // Instantiate the readout LCIO file. + if(outputFileName == null) { + throw new IllegalArgumentException("Error: Output file name not defined!"); + } + try { outputWriter = new LCIOWriter(new File(outputFileName)); } + catch (IOException e) { + e.printStackTrace(); + throw new RuntimeException(); + } + + // Create a collection for the dummy trigger bank. + LCIOCollectionFactory.setCollectionName("TriggerBank"); + LCIOCollectionFactory.setFlags(0); + triggerBankParams = LCIOCollectionFactory.produceLCIOCollection(GenericObject.class); + + // Get the total amount of time that the readout system must + // wait to make sure that all data has been safely buffered + // and exists to read out. 
+ double longestBufferBefore = 0.0; + double longestBufferAfter = 0.0; + double longestLocalBuffer = 0.0; + double longestTimeDisplacement = 0.0; + double longestDisplacedAfter = 0.0; + double longestTriggerDisplacement = 0.0; + + StringBuffer initializationBuffer = new StringBuffer(); + initializationBuffer.append("Getting longest trigger time displacement..." + nl); + for(Entry entry : triggerTimeDisplacementMap.entrySet()) { + initializationBuffer.append(String.format("\t%-30s :: %.0f%n", entry.getKey().getClass().getSimpleName(), entry.getValue().doubleValue())); + longestTriggerDisplacement = Math.max(longestTriggerDisplacement, entry.getValue().doubleValue()); + } + initializationBuffer.append("Longest is: " + longestTriggerDisplacement + nl + nl); + + initializationBuffer.append("Getting longest driver collection buffers..." + nl); + for(ManagedLCIOData data : collectionMap.values()) { + double before = Double.isNaN(data.getCollectionParameters().getWindowBefore()) ? 0.0 : data.getCollectionParameters().getWindowBefore(); + double after = Double.isNaN(data.getCollectionParameters().getWindowAfter()) ? 
0.0 : data.getCollectionParameters().getWindowAfter(); + double displacement = data.getCollectionParameters().getProductionDriver().getTimeDisplacement(); + double local = data.getCollectionParameters().getProductionDriver().getTimeNeededForLocalOutput(); + + initializationBuffer.append("\t" + data.getCollectionParameters().getCollectionName() + nl); + initializationBuffer.append(String.format("\t\t%-20s :: %.0f%n", "Buffer Before", before)); + initializationBuffer.append(String.format("\t\t%-20s :: %.0f%n", "Buffer After", after)); + initializationBuffer.append(String.format("\t\t%-20s :: %.0f%n", "Local Buffer", local)); + initializationBuffer.append(String.format("\t\t%-20s :: %.0f%n", "Displacement", displacement)); + initializationBuffer.append(String.format("\t\t%-20s :: %.0f%n", "Displaced After", (displacement + after))); + + longestBufferBefore = Math.max(longestBufferBefore, before); + longestBufferAfter = Math.max(longestBufferAfter, after); + longestLocalBuffer = Math.max(longestLocalBuffer, local); + longestTimeDisplacement = Math.max(longestTimeDisplacement, displacement); + longestDisplacedAfter = Math.max(longestDisplacedAfter, displacement + after); + } + initializationBuffer.append("Longest (before) is: " + longestBufferBefore + nl); + initializationBuffer.append("Longest (after) is: " + longestBufferAfter + nl); + initializationBuffer.append("Longest (local) is: " + longestLocalBuffer + nl); + initializationBuffer.append("Longest (displacement) is: " + longestTimeDisplacement + nl); + initializationBuffer.append("Longest (displacemed after) is: " + longestDisplacedAfter + nl + nl); + + initializationBuffer.append("Readout Window: " + readoutWindow + nl); + initializationBuffer.append("Trigger Offset: " + triggerTimeDisplacement + nl); + initializationBuffer.append("Default Before: " + triggerTimeDisplacement + nl); + initializationBuffer.append("Default After : " + (readoutWindow - triggerTimeDisplacement) + nl + nl); + + triggerDelay = 
Math.max(longestTriggerDisplacement, longestDisplacedAfter); + triggerDelay = Math.max(triggerDelay, longestLocalBuffer); + double totalNeededDisplacement = triggerDelay + longestBufferBefore + 150; + + initializationBuffer.append("Total Time Needed: " + totalNeededDisplacement + nl); + logger.fine(nl + initializationBuffer.toString()); + + // Determine the total amount of time that must be included + // in the data buffer in order to safely write out all data. + // An extra 150 ns of data is retained as a safety, just in + // case some driver needs to look unusually far back. + bufferTotal = totalNeededDisplacement; + if(zeroBuffer) + bufferTotal = 0.0; + } + + @Override + public void endOfData() { + try { outputWriter.close(); } + catch(IOException e) { + e.printStackTrace(); + throw new RuntimeException(); + } + + System.out.println("Wrote " + triggers + " triggers."); + } + + @Override + public void process(EventHeader event) { + // Check the trigger queue. + if(!triggerQueue.isEmpty()) { + // Check the earliest possible trigger write time. + boolean isWritable = getCurrentTime() >= triggerQueue.peek().getTriggerTime() + bufferTotal; + // If all collections are available to be written, the + // event should be output. + if(isWritable) { + // Store the current trigger data. 
+ TriggerTime trigger = triggerQueue.poll(); + + // 2016 MC only process one trigger, and no TS bank is stored + // 2019 MC can process multi-trigger, and TS bank is stored + List triggerList = new ArrayList(); + if(!trigger.getTriggerType().equals("noSet")) { + triggerList.add(trigger); + + // Iterate triggers in queue, remove next trigger if time of next trigger is the + // same as previous, until time of next trigger is not the same as previous or + // no next trigger + TriggerTime nextTrigger = null; + if(!triggerQueue.isEmpty()) nextTrigger = triggerQueue.peek(); + while((!triggerQueue.isEmpty()) && (nextTrigger.getTriggerTime() == trigger.getTriggerTime())) { + triggerList.add(nextTrigger); + triggerQueue.poll(); + if(!triggerQueue.isEmpty()) nextTrigger = triggerQueue.peek(); + } + } + + triggers++; + + // Make a new LCSim event. + int triggerEventNumber = event.getEventNumber() - ((int) Math.floor((getCurrentTime() - trigger.getTriggerTime()) / 2.0)); + EventHeader lcsimEvent = new BaseLCSimEvent(DatabaseConditionsManager.getInstance().getRun(), + triggerEventNumber, event.getDetectorName(), (long) 4 * (Math.round(trigger.getTriggerTime() / 4)), false); + + // 2016 MC only process one trigger, and no TS bank is stored + // 2019 MC can process multi-trigger, and TS bank is stored + if(!trigger.getTriggerType().equals("noSet")) { + List ts_list = new ArrayList(); + TSGenericObject tsBank = new TSGenericObject(); + int[] tsValues = new int[8]; + BitSet bits = new BitSet(32); + for(TriggerTime tri : triggerList) { + String triggerType = tri.getTriggerType(); + if(triggerType.equals(TriggerDriver.SINGLES0)) { + String topBot = tri.getTopBotStat(); + if(topBot.equals(TriggerDriver.TOP)) bits.set(0); + else if(topBot.equals(TriggerDriver.BOT)) bits.set(4); + else if(topBot.equals(TriggerDriver.TOPBOT)){ + bits.set(0); + bits.set(4); + } + } + else if(triggerType.equals(TriggerDriver.SINGLES1)) { + String topBot = tri.getTopBotStat(); + 
if(topBot.equals(TriggerDriver.TOP)) bits.set(1); + else if(topBot.equals(TriggerDriver.BOT)) bits.set(5); + else if(topBot.equals(TriggerDriver.TOPBOT)){ + bits.set(1); + bits.set(5); + } + } + else if(triggerType.equals(TriggerDriver.SINGLES2)) { + String topBot = tri.getTopBotStat(); + if(topBot.equals(TriggerDriver.TOP)) bits.set(2); + else if(topBot.equals(TriggerDriver.BOT)) bits.set(6); + else if(topBot.equals(TriggerDriver.TOPBOT)){ + bits.set(2); + bits.set(6); + } + } + else if(triggerType.equals(TriggerDriver.SINGLES3)) { + String topBot = tri.getTopBotStat(); + if(topBot.equals(TriggerDriver.TOP)) bits.set(3); + else if(topBot.equals(TriggerDriver.BOT)) bits.set(7); + else if(topBot.equals(TriggerDriver.TOPBOT)){ + bits.set(3); + bits.set(7); + } + } + else if(triggerType.equals(TriggerDriver.PAIR0)) bits.set(8); + else if(triggerType.equals(TriggerDriver.PAIR1)) bits.set(9); + else if(triggerType.equals(TriggerDriver.PAIR2)) bits.set(10); + else if(triggerType.equals(TriggerDriver.PAIR3)) bits.set(11); + else if(triggerType.equals(TriggerDriver.PULSER)) bits.set(15); + else if(triggerType.equals(TriggerDriver.FEE)) { + String topBot = tri.getTopBotStat(); + if(topBot.equals(TriggerDriver.TOP)) bits.set(18); + else if(topBot.equals(TriggerDriver.BOT)) bits.set(19); + else if(topBot.equals(TriggerDriver.TOPBOT)){ + bits.set(18); + bits.set(19); + } + } + } + + tsValues[0] = EvioEventConstants.TS_BANK_TAG; + + if(!bits.isEmpty()) { + tsValues[5] = (int)bits.toLongArray()[0]; + tsValues[6] = (int)bits.toLongArray()[0]; + } + else { + tsValues[5] = 0; + tsValues[6] = 0; + } + + // Filling the generic objects with the integer array + tsBank.setValues(tsValues); + + // Adding the generic object to the list + ts_list.add(tsBank); + lcsimEvent.put("TSBank", ts_list, TSGenericObject.class, 0); + } + + + // Calculate the readout window time range. This is + // used for any production driver that does not have + // a manually specified output range. 
+ double startTime = trigger.getTriggerTime() - triggerTimeDisplacement; + double endTime = startTime + readoutWindow; + + logger.finer("Trigger Time: " + trigger.getTriggerTime()); + logger.finer("Default Time Range: " + startTime + " - " + endTime); + + // All readout output is initially stored in a single + // object. This allows the readout from multiple + // drivers to be merged, if needed, and also prevents + // duplicate instances of an object from being + // written. + Map> triggeredDataMap = new HashMap>(); + + // Write out the writable collections into the event. + for(ManagedLCIOData collectionData : collectionMap.values()) { + // Ignore any collections that are not set to be persisted. + if(!collectionData.getCollectionParameters().isPersistent()) { + continue; + } + + // Get the local start and end times. A driver + // may manually specify an amount of time before + // and after the trigger time which should be + // output. If this is the case, use it instead of + // the time found through use of the readout + // window/trigger time displacement calculation. + double localStartTime = startTime; + if(!Double.isNaN(collectionData.getCollectionParameters().getWindowBefore())) { + localStartTime = trigger.getTriggerTime() - collectionData.getCollectionParameters().getWindowBefore(); + } + + double localEndTime = endTime; + if(!Double.isNaN(collectionData.getCollectionParameters().getWindowAfter())) { + localEndTime = trigger.getTriggerTime() + collectionData.getCollectionParameters().getWindowAfter(); + } + + // Get the object data for the time range. + addDataToMap(collectionData.getCollectionParameters(), localStartTime, localEndTime, triggeredDataMap); + } + + // Write out any special on-trigger collections into + // the event as well. These are collated so that if + // more than one driver contributes to the same + // collection, they will be properly merged. 
+ for(ReadoutDriver driver : driverSet) { + // Get the special collection(s) from the current + // driver, if it exists. + Collection> onTriggerData = driver.getOnTriggerData(trigger.getTriggerTime()); + + // If there are special collections, write them. + if(onTriggerData != null) { + for(TriggeredLCIOData triggerData : onTriggerData) { + addDataToMap(triggerData, triggerData.getCollectionParameters().getObjectType(), triggeredDataMap); + } + } + } + + // Create the dummy trigger bank data and store it. + TriggeredLCIOData triggerBankData = new TriggeredLCIOData(triggerBankParams); + triggerBankData.getData().add(new BaseTriggerData(new int[8])); + addDataToMap(triggerBankData, triggerBankData.getCollectionParameters().getObjectType(), triggeredDataMap); + + // Readout timestamps should be generated for both + // the "system" and the trigger. This corresponds to + // the simulation time at which the trigger occurred. + // Note that there is a "trigger delay" parameter in + // the old readout, but this does not exist in the + // new system, so both timestamps are the same. + + // Calculate the simulation trigger time. + double simTriggerTime = trigger.getTriggerTime() + triggerTimeDisplacementMap.get(trigger.getTriggeringDriver()).doubleValue(); + ReadoutTimestamp systemTimestamp = new ReadoutTimestamp(ReadoutTimestamp.SYSTEM_TRIGGERBITS, simTriggerTime); + ReadoutTimestamp triggerTimestamp = new ReadoutTimestamp(ReadoutTimestamp.SYSTEM_TRIGGERTIME, simTriggerTime); + LCIOCollectionFactory.setCollectionName(ReadoutTimestamp.collectionName); + LCIOCollection timestampCollection = LCIOCollectionFactory.produceLCIOCollection(ReadoutTimestamp.class); + TriggeredLCIOData timestampData = new TriggeredLCIOData(timestampCollection); + timestampData.getData().add(systemTimestamp); + timestampData.getData().add(triggerTimestamp); + addDataToMap(timestampData, timestampData.getCollectionParameters().getObjectType(), triggeredDataMap); + + // Store all of the data collections. 
+ for(TriggeredLCIOData triggerData : triggeredDataMap.values()) { + storeCollection(triggerData, lcsimEvent); + } + + // Write the event to the output file. + try { outputWriter.write(lcsimEvent); } + catch(IOException e) { + e.printStackTrace(); + throw new RuntimeException(); + } + } + } + + // Remove all data from the buffer that occurs before the max + // buffer length cut-off. + for(ManagedLCIOData data : collectionMap.values()) { + while(!data.getData().isEmpty() && (data.getData().getFirst().getTime() < (getCurrentTime() - 500))) { + data.getData().removeFirst(); + } + } + + // Increment the current time. + currentTime += effectiveBunches*BEAM_BUNCH_SIZE; + } + + /** + * Adds a new set of data objects to the data manager at the time + * specified. + * @param collectionName - The collection name to which the data + * should be added. + * @param dataTime - The truth time at which the data objects + * occurred. This represents the time of the object, corrected + * for time displacement due to buffering on processing on the + * part of the production driver. + * @param data - The data to add. + * @param dataType - The class type of the data objects. + * @throws IllegalArgumentException Occurs if either the + * collection specified does not exist, or if the object type of + * the data objects does not match the object type of the data in + * the collection. + * @param - Specifies the class type of the data to be added + * to the collection. + */ + public static final void addData(String collectionName, double dataTime, Collection data, Class dataType) { + // Validate that the collection has been registered. + if(!collectionMap.containsKey(collectionName)) { + throw new IllegalArgumentException("Error: Collection \"" + collectionName + "\" has not been registered."); + } + + // Get the collection data object. + ManagedLCIOData collectionData = collectionMap.get(collectionName); + + // Validate that the data type is correct. 
+ if(!collectionData.getCollectionParameters().getObjectType().isAssignableFrom(dataType)) { + throw new IllegalArgumentException("Error: Saw data type \"" + dataType.getSimpleName() + "\" but expected data type \"" + + collectionData.getCollectionParameters().getObjectType().getSimpleName() + "\" instead."); + } + + // If the data is empty, then there is no need to add it to + // the buffer. + if(!data.isEmpty()) { + // Add the new data to the data buffer. + double time = Double.isNaN(dataTime) ? currentTime - collectionData.getCollectionParameters().getGlobalTimeDisplacement() : dataTime; + LinkedList> dataBuffer = collectionData.getData(); + dataBuffer.add(new TimedList(time, data)); + } + } + + /** + * Adds a new set of data objects to the data manager at a time + * calculated based on the current simulation time corrected by + * the total time offset of the collection. + * @param collectionName - The collection name to which the data + * should be added. + * @param data - The data to add. + * @param dataType - The class type of the data objects. + * @throws IllegalArgumentException Occurs if either the + * collection specified does not exist, or if the object type of + * the data objects does not match the object type of the data in + * the collection. + * @param - Specifies the class type of the data to be added + * to the collection. + */ + public static final void addData(String collectionName, Collection data, Class dataType) { + addData(collectionName, Double.NaN, data, dataType); + } + + /** + * Checks whether or not a collection has been populated up to + * the indicated time. + * @param collectionName - The collection to check. + * @param time - The time at which the collection should be + * filled. + * @return Returns true if the collection has data + * generated up to at least the specified time, and + * false if it does not. 
+ */ + public static final boolean checkCollectionStatus(String collectionName, double time) { + // Verify that the requested collection exists. + if(!collectionMap.containsKey(collectionName)) { + throw new IllegalArgumentException("Error: Collection \"" + collectionName + "\" is not a registered collection."); + } + + // Otherwise, check if enough time has passed for the driver + // which controls to the collection to have produced output + // for the requested time period. + return time <= getCurrentTime() - collectionMap.get(collectionName).getCollectionParameters().getGlobalTimeDisplacement(); + } + + /** + * Gets the length in nanoseconds of a single event (beam bunch). + * @return Returns the length in ns of a single beam bunch. + */ + public static final double getBeamBunchSize() { + return BEAM_BUNCH_SIZE; + } + + /** + * Gets the LCIO collection parameters for a collection. + * @param collectionName - The name of the collection. + * @param objectType - The data type of the collection. + * @return Returns the collection parameters. + */ + @SuppressWarnings("unchecked") + public static final LCIOCollection getCollectionParameters(String collectionName, Class objectType) { + // Verify that the requested collection actually exists. + if(!collectionMap.containsKey(collectionName)) { + throw new IllegalArgumentException("Error: Collection \"" + collectionName + "\" does not exist."); + } + + // Get the collection and check that it is of the appropriate + // parameterized type. + LCIOCollection collection = collectionMap.get(collectionName).getCollectionParameters(); + if(collection.getObjectType() != objectType) { + throw new IllegalArgumentException("Error: Collection \"" + collectionName + "\" is of type " + collection.getObjectType().getSimpleName() + + " while object type " + objectType.getSimpleName() + " was requested."); + } + + // Return the collection parameters. 
+ // NOTE: This type case is safe, since it is verified above + // that the collection object is of the same class type + // as the parameterized type. + return (LCIOCollection) collection; + } + + /** + * Gets the current simulation time in nanoseconds. + * @return Returns the simulation time in nanoseconds. + */ + public static final double getCurrentTime() { + return currentTime; + } + + /** + * Gets a collection of data objects from a collection within the + * time range specified. + * @param startTime - The (inclusive) start of the time range. + * @param endTime The (exclusive) end of the time range. + * @param collectionName - The name of the collection. + * @param objectType - The class type of the data stored in the + * collection. + * @return Returns the data in the specified time range in the + * data collection in a {@link java.util.List List}. + * @param - Specifies the class type of the data stored in + * the collection. + */ + public static final Collection getData(double startTime, double endTime, String collectionName, Class objectType) { + return getDataList(startTime, endTime, collectionName, objectType); + } + + /** + * Gets the {@link org.lcsim.geometry.IDDecoder IDDecoder} that + * is used for the indicated managed collection, if it exists. + * @param collectionName - The collection to which the decoder + * should correspond. + * @return Returns the decoder for the collection, if it exists, + * and null otherwise. + */ + public static final IDDecoder getIDDecoder(String collectionName) { + // Verify that the requested collection actually exists. + if(!collectionMap.containsKey(collectionName)) { + throw new IllegalArgumentException("Error: Collection \"" + collectionName + "\" does not exist."); + } + + // Get the collection and obtain the ID decoder, if possible. + // If it does not exist, then leave it as a value of null. 
+ LCIOCollection collection = collectionMap.get(collectionName).getCollectionParameters(); + IDDecoder decoder = null; + try { decoder = collection.getProductionDriver().getIDDecoder(collectionName); } + catch(UnsupportedOperationException e) { } + + // Return the decoder. + return decoder; + } + + /** + * Gets the default size of the readout window. + * @return Returns the default size of the readout window in + * units of nanoseconds. + */ + public static final int getReadoutWindow() { + return readoutWindow; + } + + /** + * Gets the total amount of time by which a collection is + * displaced between the actual truth data's occurrence in the + * simulation, and the time at which the object is actually + * produced. This includes both the time displacement introduced + * by the collection's production driver as well as displacement + * introduced by any preceding drivers that serve as input for + * the production driver. + * @param collectionName - The name of the collection. + * @return Returns the total time displacement in nanoseconds. + */ + public static final double getTotalTimeDisplacement(String collectionName) { + if(collectionMap.containsKey(collectionName)) { + return collectionMap.get(collectionName).getCollectionParameters().getGlobalTimeDisplacement(); + } else { + throw new IllegalArgumentException("Error: Collection \"" + collectionName + "\" does not exist."); + } + } + + /** + * Gets the time displacement between when a trigger occurs, and + * when the triggered data is actually written out. + * @return Returns the trigger delay in units of nanoseconds. + */ + public static final double getTriggerDelay() { + return bufferTotal; + } + + /** + * Gets the time by which the trigger is offset in the readout + * window. + * @return Returns the trigger offset in units of nanoseconds. + */ + public static final double getTriggerOffset() { + return triggerTimeDisplacement; + } + + /** + * Adds a managed collection to the data manager. 
All collections + * which serve as either input or output from a {@link + * org.hps.readout.ReadoutDriver ReadoutDriver} are required to + * be registered and managed by the data manager. On-trigger + * special collections should not be registered. + * @param params - An object describing the collection + * parameters. + * @param persistent - Sets whether this collection should be + * written out to the readout LCIO file. + * @param - Specifies the class type of the data stored by + * the collection. + */ + public static final void registerCollection(LCIOCollection params, boolean persistent) { + registerCollection(params, persistent, Double.NaN, Double.NaN); + } + + /** + * Adds a managed collection to the data manager. All collections + * which serve as either input or output from a {@link + * org.hps.readout.ReadoutDriver ReadoutDriver} are required to + * be registered and managed by the data manager. On-trigger + * special collections should not be registered. + * @param params - An object describing the collection + * parameters. + * @param persistent - Sets whether this collection should be + * written out to the readout LCIO file. + * @param readoutWindowBefore - Defines a custom period of time + * before the trigger time in which all objects will be output to + * the output LCIO file. + * @param readoutWindowAfter - Defines a custom period of time + * after the trigger time in which all objects will be output to + * the output LCIO file. + * @param - Specifies the class type of the data stored by + * the collection. + */ + public static final void registerCollection(LCIOCollection params, boolean persistent, double readoutWindowBefore, double readoutWindowAfter) { + // Make sure that all arguments are defined. 
+ if(params.getCollectionName() == null) { + throw new IllegalArgumentException("Error: Collection name must be defined."); + } + if(params.getObjectType() == null) { + throw new IllegalArgumentException("Error: Collection object class must be defined."); + } + if(params.getProductionDriver() == null) { + throw new IllegalArgumentException("Error: Production driver must be defined."); + } + + // There should only be one collection for a given name. + if(collectionMap.containsKey(params.getCollectionName())) { + throw new IllegalArgumentException("Collection \"" + params.getCollectionName() + "\" of object type " + + params.getObjectType().getSimpleName() + " already exists."); + } + + // Create a collection data object. + double timeDisplacement = getTotalTimeDisplacement(params.getCollectionName(), params.getProductionDriver()); + LCIOCollectionFactory.setParams(params); + LCIOCollectionFactory.setGlobalTimeDisplacement(timeDisplacement); + LCIOCollectionFactory.setPersistent(persistent); + LCIOCollectionFactory.setWindowAfter(readoutWindowAfter); + LCIOCollectionFactory.setWindowBefore(readoutWindowBefore); + ManagedLCIOCollection managedParams = LCIOCollectionFactory.produceManagedLCIOCollection(params.getObjectType()); + ManagedLCIOData collectionData = new ManagedLCIOData(managedParams); + collectionMap.put(params.getCollectionName(), collectionData); + + // Store the readout driver in the driver set. 
+ driverSet.add(params.getProductionDriver()); + + logger.config("Registered collection \"" + managedParams.getCollectionName() + "\" of class type " + + managedParams.getObjectType().getSimpleName() + "."); + + StringBuffer detailsBuffer = new StringBuffer(); + detailsBuffer.append("\tCollection Name :: " + params.getCollectionName()); + detailsBuffer.append("\tFlags :: " + Integer.toHexString(params.getFlags())); + detailsBuffer.append("\tObject Type :: " + params.getObjectType().getSimpleName()); + detailsBuffer.append("\tReadout Name :: " + params.getReadoutName()); + detailsBuffer.append("\tProduction Driver :: " + params.getProductionDriver().getClass().getSimpleName()); + logger.finer(nl + detailsBuffer.toString()); + } + + /** + * Registers a {@link org.hps.readout.ReadoutDriver + * ReadoutDriver} with the data manager. All readout drivers must + * be registered in order for their on-trigger special data to be + * added to the output event. + * @param productionDriver - The readout driver to register. + */ + public static final void registerReadoutDriver(ReadoutDriver productionDriver) { + // Trigger drivers are registered differently. + if(productionDriver instanceof TriggerDriver) { + logger.warning(nl + "Attempted to register TriggerDriver \"" + productionDriver.getClass().getSimpleName() + "\" as a readout driver." + + nl + " Trigger drivers are registered via the method \"registerTrigger(TriggerDriver)\"." + + nl + " Ignoring request."); + return; + } + + // Add the readout driver. + driverSet.add(productionDriver); + logger.config("Registered driver: " + productionDriver.getClass().getSimpleName()); + } + + /** + * Registers a trigger driver with the data manager. + * @param triggerDriver - The trigger driver to register. + */ + public static final void registerTrigger(TriggerDriver triggerDriver) { + // Get the total time displacement for the trigger driver. 
+ double timeDisplacement = getTotalTimeDisplacement("", triggerDriver); + + // Store the time displacement in the trigger driver map. + triggerTimeDisplacementMap.put(triggerDriver, timeDisplacement); + logger.config("Registered trigger: " + triggerDriver.getClass().getSimpleName()); + } + + /** + * Changes the "readout name" parameter for a collection, while + * retaining all other parameters and stored data. + * @param collectionName - The name of the collection to modify. + * @param objectType - The object type of the collection. + * @param newReadoutName - The new name for the "readout name" + * parameter. + * @param - The object type of the data stored in the + * collection that is to be modified. + */ + public static final void updateCollectionReadoutName(String collectionName, Class objectType, String newReadoutName) { + // Get the collection. + if(!collectionMap.containsKey(collectionName)) { + throw new IllegalArgumentException("Error: Collection \"" + collectionName + "\" does not exist."); + } + ManagedLCIOData oldData = collectionMap.get(collectionName); + + // Make a new managed LCIO collection with the new readout. + LCIOCollectionFactory.setParams(oldData.getCollectionParameters()); + LCIOCollectionFactory.setReadoutName(newReadoutName); + ManagedLCIOCollection newParams = LCIOCollectionFactory.produceManagedLCIOCollection(objectType); + + // Create a new managed LCIO data object and transfer all the + // data from the old object to it. + ManagedLCIOData newData = new ManagedLCIOData(newParams); + for(TimedList oldList : oldData.getData()) { + newData.getData().add(oldList); + } + + // Put the new data list into the map. + collectionMap.put(collectionName, newData); + } + + /** + * Indicates that the specified driver saw a trigger and readout + * should occur. + * @param driver - The triggering driver. + * @throws IllegalArgumentException Occurs if the argument + * triggering driver is not registered as a trigger driver with + * the data manager. 
+ */ + static final void sendTrigger(TriggerDriver driver) { + // Check that the triggering driver is registered as a + // trigger driver. + if(!triggerTimeDisplacementMap.containsKey(driver)) { + throw new IllegalArgumentException("Error: Driver \"" + driver.getClass().getSimpleName() + "\" is not a registered trigger driver."); + } + + // Calculate the trigger and readout times. + double triggerTime = getCurrentTime() - triggerTimeDisplacementMap.get(driver); + + // Add the trigger to the trigger queue. + triggerQueue.add(new TriggerTime(triggerTime, driver)); + logger.finer("Added trigger to queue with trigger time " + triggerTime + " and readout time " + (triggerTime + bufferTotal) + " from driver " + + driver.getClass().getSimpleName() + "."); + } + + /** + * Indicates that the specified driver saw a trigger and readout + * should occur. + * @param driver - The triggering driver. + * @param triggerType - trigger type. + * @throws IllegalArgumentException Occurs if the argument + * triggering driver is not registered as a trigger driver with + * the data manager. + */ + static final void sendTrigger(TriggerDriver driver, String triggerType) { + // Check that the triggering driver is registered as a + // trigger driver. + if(!triggerTimeDisplacementMap.containsKey(driver)) { + throw new IllegalArgumentException("Error: Driver \"" + driver.getClass().getSimpleName() + "\" is not a registered trigger driver."); + } + + // Calculate the trigger and readout times. + double triggerTime = getCurrentTime() - triggerTimeDisplacementMap.get(driver); + + // Add the trigger to the trigger queue. + triggerQueue.add(new TriggerTime(triggerTime, triggerType, driver)); + logger.finer("Added trigger to queue with trigger time " + triggerTime + " and readout time " + (triggerTime + bufferTotal) + " from driver " + + driver.getClass().getSimpleName() + "."); + } + + /** + * Indicates that the specified driver saw a trigger and readout + * should occur. 
+ * @param driver - The triggering driver. + * @param triggerType - trigger type. + * @throws IllegalArgumentException Occurs if the argument + * triggering driver is not registered as a trigger driver with + * the data manager. + */ + static final void sendTrigger(TriggerDriver driver, String triggerType, String topBot) { + // Check that the triggering driver is registered as a + // trigger driver. + if(!triggerTimeDisplacementMap.containsKey(driver)) { + throw new IllegalArgumentException("Error: Driver \"" + driver.getClass().getSimpleName() + "\" is not a registered trigger driver."); + } + + // Calculate the trigger and readout times. + double triggerTime = getCurrentTime() - triggerTimeDisplacementMap.get(driver); + + // Add the trigger to the trigger queue. + triggerQueue.add(new TriggerTime(triggerTime, triggerType, topBot, driver)); + logger.finer("Added trigger to queue with trigger time " + triggerTime + " and readout time " + (triggerTime + bufferTotal) + " from driver " + + driver.getClass().getSimpleName() + "."); + } + + /** + * Adds a data collection corresponding to a given parameter set + * to the data map. If there is already data existing under the + * same collection, it is then merged without duplicating any + * objects. + * @param params - The collection parameters for the data. + * @param readoutData - The data to add. + * @param triggeredDataMap - The data map into which the data + * collection should be added. + */ + @SuppressWarnings("unchecked") + private static final void addDataToMap(LCIOCollection params, Collection readoutData, Map> triggeredDataMap) { + // Check and see if an output collection already exists for + // this parameter set. If so, use it; otherwise, make a new + // entry for it. 
+ TriggeredLCIOData untypedData = triggeredDataMap.get(params.getCollectionName()); + TriggeredLCIOData typedData = null; + if(untypedData == null) { + typedData = new TriggeredLCIOData(params); + triggeredDataMap.put(params.getCollectionName(), typedData); + } else { + // Verify that the collection parameters are the same. + if(untypedData.getCollectionParameters().equals(params)) { + // Note: This cast is safe; if the parameters objects + // are the same, then the object sets are necessarily + // of the same object type. + typedData = (TriggeredLCIOData) untypedData; + } else { + throw new RuntimeException("Error: Found multiple collections of name \"" + params.getCollectionName() + "\", but of differing definitions."); + } + } + + // Add the readout data to the collection data list. + typedData.getData().addAll(readoutData); + } + + /** + * Adds data stored in the collection defined by the parameters + * object within the given time range to the data map. If there + * is already data existing under the same collection, it is then + * merged without duplicating any objects. + * @param params - The parameters for the collection to add. + * @param startTime - The start of the time range within the data + * buffer from which data should be drawn. + * @param endTime - The end of the time range within the data + * buffer from which data should be drawn. + * @param triggeredDataMap - The data map into which the data + * collection should be added. + */ + private static final void addDataToMap(LCIOCollection params, double startTime, double endTime, Map> triggeredDataMap) { + // Get the readout data objects. + List triggerData = getDataList(startTime, endTime, params.getCollectionName(), params.getObjectType()); + + // Pass the readout data to the merging method. + addDataToMap(params, triggerData, triggeredDataMap); + } + + /** + * Adds data stored in a triggered collection object to the data + * map. 
If there is already data existing under the same + * collection, it is then merged without duplicating any objects. + * @param dataList - The collection data to be added. + * @param objectType - the object type of the collection data. + * @param triggeredDataMap - The data map into which the data + * collection should be added. + */ + private static final void addDataToMap(TriggeredLCIOData dataList, Class objectType, Map> triggeredDataMap) { + // Check that the parameters object is the same object type + // as is specified. + if(dataList.getCollectionParameters().getObjectType() != objectType) { + throw new IllegalArgumentException("Error: Can not process class type " + dataList.getCollectionParameters().getObjectType().getSimpleName() + + " as class type " + objectType.getSimpleName()); + } else { + // Note: This is safe - the above check requires that the + // object type be the parameterized type. + @SuppressWarnings("unchecked") + TriggeredLCIOData typedDataList = (TriggeredLCIOData) dataList; + Set triggerData = typedDataList.getData(); + addDataToMap(typedDataList.getCollectionParameters(), triggerData, triggeredDataMap); + } + } + + /** + * Gets a list of data objects from a collection within the time + * range specified. + * @param startTime - The (inclusive) start of the time range. + * @param endTime The (exclusive) end of the time range. + * @param collectionName - The name of the collection. + * @param objectType - The class type of the data stored in the + * collection. + * @return Returns the data in the specified time range in the + * data collection in a {@link java.util.List List}. + * @param - Specifies the class type of the data stored in + * the collection. + */ + private static final List getDataList(double startTime, double endTime, String collectionName, Class objectType) { + // Get the collection data. 
+ ManagedLCIOData collectionData = collectionMap.get(collectionName); + + // Verify that the a collection of the indicated name exists + // and that it is the appropriate object type. + if(collectionData != null) { + if(!objectType.isAssignableFrom(collectionData.getCollectionParameters().getObjectType())) { + throw new IllegalArgumentException("Error: Expected object type " + objectType.getSimpleName() + " for collection \"" + collectionName + + ",\" but found object type " + collectionData.getCollectionParameters().getObjectType().getSimpleName() + "."); + } + } else { + throw new IllegalArgumentException("Error: Collection \"" + collectionName + "\" does not exist."); + } + + // Throw an alert if the earliest requested time precedes the + // earliest buffered time, and similarly for the latest time. + LinkedList> dataLists = collectionData.getData(); + + // Iterate through the data and collect all entries that have + // an associated truth time within the given time range. The + // lower bound is inclusive, the upper bound is exclusive. + List outputList = new ArrayList(); + for(TimedList dataList : dataLists) { + if(dataList.getTime() >= startTime && dataList.getTime() < endTime) { + // Add the items from the list to the output list. + for(Object o : dataList) { + if(objectType.isAssignableFrom(o.getClass())) { + outputList.add(objectType.cast(o)); + } else { + throw new ClassCastException("Error: Unexpected object of type " + o.getClass().getSimpleName() + " in collection \"" + + collectionName + ".\""); + } + } + } + } + + // Return the collected items. + return outputList; + } + + /** + * Calculates the total time displacement of a collection based + * on its production driver, and the time displacements of the + * input collections from which it is produced. This is processed + * recursively, so all time displacements in the production chain + * of a collection are accounted for. + * @param collectionName - The name of the collection. 
+ * @param productionDriver - The driver which produces the + * collection. + * @return Returns the total time displacement for the collection + * in units of nanoseconds. + */ + private static final double getTotalTimeDisplacement(String collectionName, ReadoutDriver productionDriver) { + // Make sure that there are no circular dependencies. + validateDependencies(collectionName, productionDriver, new HashSet()); + + // The total time displacement is the displacement of the + // dependent collection with the largest displacement plus + // the local time displacement of the production driver. + double baseDisplacement = 0.0; + for(String dependency : productionDriver.getDependencies()) { + // All dependencies must already be registered. Check + // that it is. + double dependencyDisplacement = 0.0; + if(collectionMap.containsKey(dependency)) { + dependencyDisplacement = collectionMap.get(dependency).getCollectionParameters().getGlobalTimeDisplacement(); + } else { + throw new IllegalArgumentException("Error: Collection \"" + dependency + "\" has not been registered."); + } + + // Select the largest value. + baseDisplacement = Math.max(baseDisplacement, dependencyDisplacement); + } + + // Return the sum of the largest base displacement and the + // production driver. + return baseDisplacement + productionDriver.getTimeDisplacement(); + } + + /** + * Writes an entire {@link org.hps.readout.ReadoutDriver + * ReadoutDriver} on-trigger data collection to the specified + * output event. + * @param collectionData - The on-trigger readout data. + * @param event - The output event. + * @param - Specifies the class type of the data that is to be + * written to the output event. 
+ */ + private static final void storeCollection(TriggeredLCIOData collectionData, EventHeader event) { + storeCollection(collectionData.getCollectionParameters().getCollectionName(), collectionData.getCollectionParameters().getObjectType(), + collectionData.getCollectionParameters().getFlags(), collectionData.getCollectionParameters().getReadoutName(), + collectionData.getData(), event); + } + + /** + * Writes the specified data to the output event. + * @param collectionName - The name of the output collection. + * @param objectType - The class of the output collection data + * objects. + * @param flags - Any LCIO flags which apply to the data. + * @param readoutName - The readout name for the data, if it is + * needed. null should be used if a readout name is + * not required. + * @param collectionData - A parameterized {@link + * java.util.Collection Collection} containing the data that is + * to be written. + * @param event - The event into which the data is to be written. + * @param - Specifies the class type of the data that is to be + * written to the output event. + */ + private static final void storeCollection(String collectionName, Class objectType, int flags, String readoutName, + Collection collectionData, EventHeader event) { + // The input collection must be a list. If it already is, + // just use it directly. Otherwise, copy the contents into an + // appropriately parameterized list. + List dataList; + if(collectionData instanceof List) { + dataList = (List) collectionData; + } else { + dataList = new ArrayList(collectionData.size()); + dataList.addAll(collectionData); + } + + // Place the data into the LCIO event. 
+ if(readoutName == null) { + event.put(collectionName, dataList, objectType, flags); + } else { + event.put(collectionName, dataList, objectType, flags, readoutName); + } + + logger.finer(String.format("Output %d objects of type %s to collection \"%s\".", dataList.size(), objectType.getSimpleName(), collectionName)); + } + + /** + * Checks that the dependencies of a collection are valid. This + * consists of checking that any dependencies are registered with + * the data management driver and also that there are no circular + * dependencies present. + * @param collectionName - The name of the collection to check. + * @param productionDriver - The production driver of the + * collection to check. + * @param dependents - A set containing all of the collections + * which depend on this driver in the chain. Note that for the + * first call, this should be an empty set. + */ + private static final void validateDependencies(String collectionName, ReadoutDriver productionDriver, Set dependents) { + // Add the current driver to the list of dependents. + dependents.add(collectionName); + + // Check that none of the dependencies of the current driver + // are also dependencies of a driver higher in the chain. + for(String dependency : productionDriver.getDependencies()) { + // The dependency must be registered. + if(!collectionMap.containsKey(dependency)) { + throw new IllegalArgumentException("Error: Collection \"" + dependency + "\" has not been registered."); + } + + // Get the collection data for the dependency. + ManagedLCIOData collectionData = collectionMap.get(dependency); + + // Check that this dependency does not depend on the + // higher driver. 
+ for(String dependent : dependents) { + if(collectionData.getCollectionParameters().getProductionDriver().getDependencies().contains(dependent)) { + throw new IllegalStateException("Error: Collection \"" + dependency + "\" depends on collection \"" + dependent + + ",\" but collection \"" + dependent + "\" also depends of collection \"" + dependency + ".\""); + } + } + + // If there are no detected circular dependencies, then + // perform the same check on the dependencies of this + // dependency. + Set dependencySet = new HashSet(); + dependencySet.addAll(dependents); + validateDependencies(dependency, collectionData.getCollectionParameters().getProductionDriver(), dependencySet); + } + } + + /** + * Adds the argument particle and all of its direct parents to + * the particle set. + * @param particle - The base particle. + * @param particleSet - The set that is to contain the full tree + * of particles. + */ + public static final void addParticleParents(MCParticle particle, Set particleSet) { + // Add the particle itself to the set. + particleSet.add(particle); + + // If the particle has parents, run the same method for each + // parent. + if(!particle.getParents().isEmpty()) { + for(MCParticle parent : particle.getParents()) { + addParticleParents(parent, particleSet); + } + } + } + + /** + * Sets the output file name for the triggered data file. + * @param filepath - The file path for the output file. + */ + public static final void setOutputFile(String filepath) { + outputFileName = filepath; + } + + /** + * Sets the default size of the readout window, in units of + * nanoseconds. Note that this can be overridden by specific + * drivers. + * @param nanoseconds - The length of the default readout window. + */ + public static final void setReadoutWindow(int nanoseconds) { + readoutWindow = nanoseconds; + } + /** + * sets the time passed between LCIO events. + * Used for running MC without putting bunches + * between "signal" events. 
+ * For running of MC-generated beam, set this to 1 + * For pulser-data overlay, set to 250 + * (250*2ns = 500ns empty time) + */ + public static final void setEffectiveBunches(int value){ + effectiveBunches=value; + } + /** + * if true set buffer time to 0 + * used for pulser-data overlay MC readout. + * set to false for MC-generated beam + */ + public static final void setZeroBuffer(boolean zero){ + zeroBuffer=zero; + } + +} diff --git a/steering-files/src/main/resources/org/hps/steering/readout/TestNoSpacing.lcsim b/steering-files/src/main/resources/org/hps/steering/readout/TestNoSpacing.lcsim index 790f8d19b..32c0ceda4 100644 --- a/steering-files/src/main/resources/org/hps/steering/readout/TestNoSpacing.lcsim +++ b/steering-files/src/main/resources/org/hps/steering/readout/TestNoSpacing.lcsim @@ -435,8 +435,10 @@ - 200 - ${outputFile}.slcio + 250 + true + 200 + ${outputFile}.slcio From 18871248a8d7724ab5eb3b820cc7185cfd50f785 Mon Sep 17 00:00:00 2001 From: Matt Graham Date: Thu, 5 Dec 2024 10:52:56 -0800 Subject: [PATCH 3/8] everything running for nospacing with just small changes to existing readout; remove standalone no-spacing drivers --- ...ionWithPulserDataMergingReadoutDriver.java | 170 +- ...ionWithPulserDataMergingReadoutDriver.java | 44 +- ...lDigiWithPulserNoSpacingReadoutDriver.java | 1899 ----------------- ...alDigiWithPulseNoSpacingReadoutDriver.java | 153 -- ...calRawConverterNoSpacingReadoutDriver.java | 153 -- .../hps/digi/nospacing/EmptyEventsDriver.java | 147 -- .../nospacing/EmptyEventsReadoutDriver.java | 223 -- .../GTPClusterNoSpacingReadoutDriver.java | 390 ---- ...peDigiWithPulseNoSpacingReadoutDriver.java | 224 -- .../HodoscopePatternNoSpacingDriver.java | 436 ---- ...opeRawConverterNoSpacingReadoutDriver.java | 78 - .../NoSpacingTriggerDriver.java.donothing | 159 -- .../RawConverterNoSpacingReadoutDriver.java | 259 --- ...glesTrigger2019NoSpacingReadoutDriver.java | 415 ---- ...tDigiWithPulserNoSpacingReadoutDriver.java | 867 -------- 
.../readout/RawConverterReadoutDriver.java | 28 +- .../ecal/updated/GTPClusterReadoutDriver.java | 49 +- .../HodoscopePatternReadoutDriver.java | 18 +- .../SinglesTrigger2019ReadoutDriver.java | 73 +- .../org/hps/readout/ReadoutDataManager.java | 53 +- .../java/org/hps/readout/ReadoutDriver.java | 20 +- .../readout/TestNoSpacingModifyCurrent.lcsim | 470 ++++ .../recon/PhysicsRun2019MCRecon_KF.lcsim | 252 +++ 23 files changed, 1093 insertions(+), 5487 deletions(-) delete mode 100644 digi/src/main/java/org/hps/digi/nospacing/CalDigiWithPulserNoSpacingReadoutDriver.java delete mode 100644 digi/src/main/java/org/hps/digi/nospacing/EcalDigiWithPulseNoSpacingReadoutDriver.java delete mode 100755 digi/src/main/java/org/hps/digi/nospacing/EcalRawConverterNoSpacingReadoutDriver.java delete mode 100644 digi/src/main/java/org/hps/digi/nospacing/EmptyEventsDriver.java delete mode 100644 digi/src/main/java/org/hps/digi/nospacing/EmptyEventsReadoutDriver.java delete mode 100755 digi/src/main/java/org/hps/digi/nospacing/GTPClusterNoSpacingReadoutDriver.java delete mode 100644 digi/src/main/java/org/hps/digi/nospacing/HodoscopeDigiWithPulseNoSpacingReadoutDriver.java delete mode 100644 digi/src/main/java/org/hps/digi/nospacing/HodoscopePatternNoSpacingDriver.java delete mode 100755 digi/src/main/java/org/hps/digi/nospacing/HodoscopeRawConverterNoSpacingReadoutDriver.java delete mode 100755 digi/src/main/java/org/hps/digi/nospacing/NoSpacingTriggerDriver.java.donothing delete mode 100755 digi/src/main/java/org/hps/digi/nospacing/RawConverterNoSpacingReadoutDriver.java delete mode 100644 digi/src/main/java/org/hps/digi/nospacing/SinglesTrigger2019NoSpacingReadoutDriver.java delete mode 100755 digi/src/main/java/org/hps/digi/nospacing/SvtDigiWithPulserNoSpacingReadoutDriver.java create mode 100644 steering-files/src/main/resources/org/hps/steering/readout/TestNoSpacingModifyCurrent.lcsim create mode 100644 
steering-files/src/main/resources/org/hps/steering/recon/PhysicsRun2019MCRecon_KF.lcsim diff --git a/digi/src/main/java/org/hps/digi/DigitizationWithPulserDataMergingReadoutDriver.java b/digi/src/main/java/org/hps/digi/DigitizationWithPulserDataMergingReadoutDriver.java index 461b22846..2761babf7 100644 --- a/digi/src/main/java/org/hps/digi/DigitizationWithPulserDataMergingReadoutDriver.java +++ b/digi/src/main/java/org/hps/digi/DigitizationWithPulserDataMergingReadoutDriver.java @@ -65,7 +65,8 @@ public abstract class DigitizationWithPulserDataMergingReadoutDriver500 MeV + /** * Specifies the name of the subdetector geometry object. */ @@ -168,7 +169,9 @@ public abstract class DigitizationWithPulserDataMergingReadoutDriver debugCellIDWithHits=new ArrayList(); // ============================================================== // ==== Driver Parameters ======================================= @@ -394,16 +397,23 @@ public void process(EventHeader event) { // Get current raw hits in pulser data. Collection rawHits = ReadoutDataManager.getData(ReadoutDataManager.getCurrentTime(), ReadoutDataManager.getCurrentTime() + 2.0, PulserDataCollectionName, RawTrackerHit.class); - + if(debug)System.out.println("DigiReadout:: "+truthHitCollectionName +" local time = "+ReadoutDataManager.getCurrentTime()+" number of hits = "+hits.size()); + // Once an overlaid event is input, reset adcBufferMap to ensure that other overlaid events do not affect the current event. if(hits.size()!=0 || rawHits.size()!=0) { - // Get the set of all possible channel IDs. + // Get the set of all possible channel IDs. Set cells = getChannelIDs(); - + if(debug)System.out.println(this.getClass().getName()+":: resetting adc buffers at time = "+ReadoutDataManager.getCurrentTime()); // Reset adcBufferMap. 
for(Long cellID : cells) adcBufferMap.get(cellID).setAll((int) Math.round(getPedestalConditions(cellID))); - } + debugCellIDWithHits.clear(); + //if we are in no-spacing mode, just clear everything + if(doNoSpacing){ + resetBuffers(); + channelIntegrationSumMap.clear(); + } + } /* To merge MC data with pulser data, three different cases are handled separately. * Case 1: If pulser data does not have a channel in MC data, directly buffer samples @@ -432,9 +442,11 @@ public void process(EventHeader event) { // The hash map is used to check if MC data has a channel that is also in pulser data. Map hitCellIDMap = new HashMap(hits.size()); for(SimCalorimeterHit hit : hits) { - // Store the truth data. + // Store the truth data. Long hitCellID = hit.getCellID(); // For Ecal, cell ID is geometry ID; For hodo, cell ID is channel ID after hodoscope preprocessing - + if(debug) + System.out.println(this.getClass().getName()+":: process:: sim hit energy = "+hit.getRawEnergy()+" on cell = "+hitCellID); + ObjectRingBuffer hitBuffer = truthBufferMap.get(hitCellID); hitBuffer.addToCell(0, hit); @@ -499,7 +511,18 @@ public void process(EventHeader event) { // Get the truth hit energy deposition. 
double energyAmplitude = hit.getRawEnergy(); - + if(energyAmplitude>debugEnergyThresh && debug){ + System.out.println(this.getClass().getName()+":: process:: Putting sim hits in adcBuffer cellID = "+hitCellID); + System.out.println(this.getClass().getName()+":: process:: adding hits to adcBuffer cellID = "+hitCellID); + + System.out.println(this.getClass().getName()+":: process:: ReadoutDataManager Time = "+ReadoutDataManager.getCurrentTime()); + System.out.println(this.getClass().getName()+":: process:: hit time = "+hit.getTime()); + System.out.println(this.getClass().getName()+":: process:: readouttime() = "+readoutTime()); + + System.out.println(this.getClass().getName()+":: process:: truth energy = "+energyAmplitude); + debugCellIDWithHits.add(hitCellID); + } + if(hitCellIDMap.get(hitCellID) == 1) { // If noise should be added, calculate a random value for // the noise and add it to the truth energy deposition. @@ -531,14 +554,20 @@ public void process(EventHeader event) { double sigma = getNoiseConditions(hitCellID); currentValue += RandomGaussian.getGaussian(0, sigma); } - + // An ADC value is not allowed to exceed 4095. If a // larger value is observed, 4096 (overflow) is given // instead. (This corresponds to >2 Volts.) int digitizedValue = Math.min((int) Math.round(pedestal + currentValue), (int) Math.pow(2, nBit)); - + if(energyAmplitude>debugEnergyThresh&&debug) + System.out.println(this.getClass().getName()+":: process: writing digitized value for sample = "+i + +" post-noise current value = "+currentValue + +"; digitized value = "+digitizedValue); + // Write this value to the ADC buffer. adcBuffer.setValue(i, digitizedValue); + // + } } @@ -602,12 +631,24 @@ public void process(EventHeader event) { // contain any newly integrated hits and perform integration. 
List newHits = null; List newTruthRelations = null; - while(ReadoutDataManager.getCurrentTime() - readoutTime() + ReadoutDataManager.getBeamBunchSize() >= READOUT_PERIOD) { - if(newHits == null) { newHits = new ArrayList(); } - if(newTruthRelations == null) { newTruthRelations = new ArrayList(); } - readHits(newHits, newTruthRelations); - readoutCounter++; - } + + if(doNoSpacing){ + if(newHits == null) { newHits = new ArrayList(); } + if(newTruthRelations == null) { newTruthRelations = new ArrayList(); } + readoutCounter=0; + for(int i = 0; i < pulserDataWindow; i++){ + // System.out.println(this.getClass().getName()+":: looping over pulse data window readoutCounter = "+readoutCounter); + readHits(newHits, newTruthRelations); + readoutCounter++; + } + }else{ + while(ReadoutDataManager.getCurrentTime() - readoutTime() + ReadoutDataManager.getBeamBunchSize() >= READOUT_PERIOD) { + if(newHits == null) { newHits = new ArrayList(); } + if(newTruthRelations == null) { newTruthRelations = new ArrayList(); } + readHits(newHits, newTruthRelations); + readoutCounter++; + } + } } // TODO: Document this. @@ -624,11 +665,17 @@ private void readHits(List newHits, List newTruth // Store the pedestal subtracted value so that it may // be checked against the integration threshold. int pedestalSubtractedValue = adcBuffer.getValue() - pedestal; - + if(pedestalSubtractedValue > integrationThreshold && debug){ + System.out.println(this.getClass().getName()+":: readHits:: Looping over adcBufferMap cellID = "+cellID); + System.out.println(this.getClass().getName()+":: readHits:: ped subtracted ADC counts = "+pedestalSubtractedValue); + } + // Get the total ADC value that has been integrated // on this channel. Integer sum = channelIntegrationSumMap.get(cellID); - + if(pedestalSubtractedValue >integrationThreshold && debug) + System.out.println(this.getClass().getName()+":: readHits:: sum = "+sum); + // If any readout hits exist on this channel, add the // current ADC values to them. 
@@ -641,7 +688,7 @@ private void readHits(List newHits, List newTruth // events (4 ns). This will indicate when the // integration started and, in turn, should end. channelIntegrationTimeMap.put(cellID, readoutCounter); - + if(debug)System.out.println(this.getClass().getName()+":: readHits:: Found a hit above threshold = "+cellID); // Integrate the ADC values for a number of // samples defined by NSB and threshold // crossing sample. @@ -649,7 +696,7 @@ private void readHits(List newHits, List newTruth for(int i = 0; i <= numSamplesBefore; i++) { sumBefore += adcBuffer.getValue(-(numSamplesBefore - i)); } - + if(debug)System.out.println(this.getClass().getName()+":: readHits:: sum before this sample = "+sumBefore); // This will represent the total integral sum at // the current point in time. Store it in the sum // buffer so that it may be incremented later as @@ -680,13 +727,16 @@ private void readHits(List newHits, List newTruth // If the integration sum is defined, then pulse // integration is ongoing. if(sum != null) { - // Three cases are treated separataly + if(debug)System.out.println(this.getClass().getName()+":: readHits:: integration is ongoing..."+cellID+" count = "+readoutCounter); + // Three cases are treated separataly // Case 1: CHANNEL_INTEGRATION_DEADTIME > numSamplesAfter // Case 2: CHANNEL_INTEGRATION_DEADTIME == numSamplesAfter // Case 3: CHANNEL_INTEGRATION_DEADTIME < numSamplesAfter if(CHANNEL_INTEGRATION_DEADTIME > numSamplesAfter) { // Case 1 //Continue integration until NSA, the threshold-crossing sample has been added before. 
- if (channelIntegrationTimeMap.get(cellID) + numSamplesAfter - 1 >= readoutCounter) { + if(debug)System.out.println(this.getClass().getName()+":: readHits::case 1: channel deadtime > numSamplesAfter "+cellID+" count = "+readoutCounter); + if (channelIntegrationTimeMap.get(cellID) + numSamplesAfter - 1 >= readoutCounter) { + if(debug)System.out.println(this.getClass().getName()+":: readHits::case 1: integration + numSamplesAfter - 1>= readoutCounter "+cellID+" count = "+readoutCounter); channelIntegrationADCMap.get(cellID).add(adcBuffer.getValue(0)); // Add the new ADC sample. @@ -703,7 +753,8 @@ private void readHits(List newHits, List newTruth // to data manager. else if (channelIntegrationTimeMap.get(cellID) + numSamplesAfter - 1 == readoutCounter - 1) {//At NSA + 1, hit is added into data manager // Add a new calorimeter hit. - RawCalorimeterHit newHit = new BaseRawCalorimeterHit(cellID, sum, + if(debug)System.out.println(this.getClass().getName()+":: readHits:: case 1: reached NSA + 1; adding new hit "+cellID+" count = "+readoutCounter); + RawCalorimeterHit newHit = new BaseRawCalorimeterHit(cellID, sum, 64 * channelIntegrationTimeMap.get(cellID)); newHits.add(newHit); // Cycle-clock for events is 2 ns, while cycle-clock for samples is 4 ns @@ -727,8 +778,10 @@ else if (channelIntegrationTimeMap.get(cellID) + CHANNEL_INTEGRATION_DEADTIME - } // Case 1 ends else if(CHANNEL_INTEGRATION_DEADTIME == numSamplesAfter){ // Case 2 // Continue integration until NSA, the threshold-crossing sample has been added before. 
+ if(debug)System.out.println(this.getClass().getName()+":: readHits::case 2: channel deadtime == numSamplesAfter "+cellID+" count = "+readoutCounter); if (channelIntegrationTimeMap.get(cellID) + numSamplesAfter - 1 >= readoutCounter) { channelIntegrationADCMap.get(cellID).add(adcBuffer.getValue(0)); + if(debug)System.out.println(this.getClass().getName()+":: readHits::Case2: integration + numSamplesAfter - 1>= readoutCounter "+cellID+" count = "+readoutCounter); // Add the new ADC sample. channelIntegrationSumMap.put(cellID, sum + adcBuffer.getValue(0)); @@ -743,6 +796,7 @@ else if(CHANNEL_INTEGRATION_DEADTIME == numSamplesAfter){ // Case 2 // to data manager. else if (channelIntegrationTimeMap.get(cellID) + numSamplesAfter - 1 == readoutCounter - 1) {//At NSA + 1, hit is added into data manager // Add a new calorimeter hit. + if(debug)System.out.println(this.getClass().getName()+":: readHits:: case 2: reached NSA + 1; adding new hit "+cellID+" count = "+readoutCounter); RawCalorimeterHit newHit = new BaseRawCalorimeterHit(cellID, sum, 64 * channelIntegrationTimeMap.get(cellID)); newHits.add(newHit); @@ -761,8 +815,10 @@ else if (channelIntegrationTimeMap.get(cellID) + numSamplesAfter - 1 == readoutC } } // Case 2 ends else { // Case 3 + if(debug)System.out.println(this.getClass().getName()+":: readHits::case 3: channel deadtime < numSamplesAfter "+cellID+" count = "+readoutCounter); if (channelIntegrationTimeMap.get(cellID) + CHANNEL_INTEGRATION_DEADTIME - 1 >= readoutCounter) { - // Continue integration until CHANNEL_INTEGRATION_DEADTIME + if(debug)System.out.println(this.getClass().getName()+":: readHits::Case3: integration + DEADTIME - 1>= readoutCounter "+cellID+" count = "+readoutCounter); + // Continue integration until CHANNEL_INTEGRATION_DEADTIME channelIntegrationADCMap.get(cellID).add(adcBuffer.getValue(0)); // Add the new ADC sample. 
@@ -779,9 +835,12 @@ else if (channelIntegrationTimeMap.get(cellID) + numSamplesAfter - 1 == readoutC flagStartNewIntegration.put(cellID, true); } else if (channelIntegrationTimeMap.get(cellID) + numSamplesAfter - 1 >= readoutCounter) { + if(debug)System.out.println(this.getClass().getName()+":: readHits::Case3: integration + numSamplesAfter - 1>= readoutCounter "+cellID+" count = "+readoutCounter); if(flagStartNewIntegration.get(cellID) == true) { // Flag for previous sample is true + if(debug)System.out.println(this.getClass().getName()+":: readHits::Case3: flagStartNewIntegration = true "+cellID+" count = "+readoutCounter); if(pedestalSubtractedValue <= integrationThreshold) { // If sample is less than threshold, then do not start new integration channelIntegrationADCMap.get(cellID).add(adcBuffer.getValue(0)); + if(debug)System.out.println(this.getClass().getName()+":: readHits::Case3: too small...don't start new integration "+cellID+" count = "+readoutCounter); // Add the new ADC sample. channelIntegrationSumMap.put(cellID, sum + adcBuffer.getValue(0)); @@ -794,6 +853,7 @@ else if (channelIntegrationTimeMap.get(cellID) + numSamplesAfter - 1 >= readoutC } else { // if sample is larger than threshold, a hit is added into data manager and start new integration // Add a new calorimeter hit. + if(debug)System.out.println(this.getClass().getName()+":: readHits:: case 3: new hit starting, storing old hit; adding new hit "+cellID+" count = "+readoutCounter); RawCalorimeterHit newHit = new BaseRawCalorimeterHit(cellID, sum, 64 * channelIntegrationTimeMap.get(cellID)); newHits.add(newHit); @@ -850,6 +910,7 @@ else if (channelIntegrationTimeMap.get(cellID) + numSamplesAfter - 1 >= readoutC } } else { // Flag for previous sample is false + if(debug)System.out.println(this.getClass().getName()+":: readHits::Case3: flagStartNewIntegration = false "+cellID+" count = "+readoutCounter); channelIntegrationADCMap.get(cellID).add(adcBuffer.getValue(0)); // Add the new ADC sample. 
@@ -865,8 +926,9 @@ else if (channelIntegrationTimeMap.get(cellID) + numSamplesAfter - 1 >= readoutC } } else if (channelIntegrationTimeMap.get(cellID) + numSamplesAfter - 1 == readoutCounter - 1) {//If reach NSA + 1, hit is added into data manager, and flag is set as false - // Add a new calorimeter hit. - RawCalorimeterHit newHit = new BaseRawCalorimeterHit(cellID, sum, + if(debug)System.out.println(this.getClass().getName()+":: readHits:: case 3: reached NSA + 1; adding new hit "+cellID+" count = "+readoutCounter); + // Add a new calorimeter hit. + RawCalorimeterHit newHit = new BaseRawCalorimeterHit(cellID, sum, 64 * channelIntegrationTimeMap.get(cellID)); newHits.add(newHit); integrationTime = channelIntegrationTimeMap.get(cellID) * 4 + 2; @@ -905,9 +967,21 @@ else if (channelIntegrationTimeMap.get(cellID) + numSamplesAfter - 1 == readoutC // Write the trigger path output data to the readout data // manager. Truth data is optional. - - + + //if running no-spacing, set the time to current time+readout + //I'm just replacing integration time here to make it easier + //note...I have no idea how using integration time works + //in the "spacing" readout. It's in local units, but the lookup + //in GTPClusters is in global??? 
I'm missing something + + if(doNoSpacing) + integrationTime=readoutTime()+readoutCounter * READOUT_PERIOD; + + if(debug && newHits.size()>0) + System.out.println("DigiReadout:: "+ outputHitCollectionName+" time = "+integrationTime+" adding trigger hits = "+newHits.size()); ReadoutDataManager.addData(outputHitCollectionName, integrationTime, newHits, RawCalorimeterHit.class); + if(doNoSpacing) + newHits.clear(); if(writeTriggerTruth) { ReadoutDataManager.addData(triggerTruthRelationsCollectionName, integrationTime, newTruthRelations, LCRelation.class); } @@ -1046,7 +1120,8 @@ protected Collection> getOnTriggerData(double triggerTime) } else { collectionsList = new ArrayList>(2); } - + if(debug) + System.out.println(this.getClass().getName()+":: got a trigger at time = "+triggerTime); // Readout drivers need to produce readout timestamps to // specify when they occurred in terms of simulation time. // The readout timestamp for the subdetector data should be @@ -1080,6 +1155,9 @@ protected Collection> getOnTriggerData(double triggerTime) List readoutHits = null; if(mode == 1) { readoutHits = getMode1Hits(triggerTime); } else { readoutHits = getMode3Hits(triggerTime); } + if(debug) + System.out.println(this.getClass().getName()+":: number of readoutHits = "+readoutHits.size()); + TriggeredLCIOData readoutData = new TriggeredLCIOData(mode13HitCollectionParams); readoutData.getData().addAll(readoutHits); collectionsList.add(readoutData); @@ -1159,7 +1237,10 @@ protected double getReadoutWindowBefore() { @Override protected double getTimeDisplacement() { - return localTimeOffset; + if(doNoSpacing) + return 0; + else + return localTimeOffset; } @Override @@ -1272,7 +1353,8 @@ private List getMode1Hits(double triggerTime) { // Check that there is a threshold-crossing at some // point in the ADC buffer. 
if(adcValues[i] > getPedestalConditions(cellID) + integrationThreshold) { - isAboveThreshold = true; + if(debug)System.out.println(this.getClass().getName()+":: found an adc value above threshold for cellID = "+cellID); + isAboveThreshold = true; break; } } @@ -1389,14 +1471,23 @@ private short[] getTriggerADCValues(long cellID, double triggerTime) { // Calculate the offset between the current position and the // trigger time. int readoutLatency = getReadoutLatency(triggerTime); - // Get the ADC pipeline. IntegerRingBuffer pipeline = adcBufferMap.get(cellID); - + if(debug && debugCellIDWithHits.contains(cellID)){ + System.out.println(this.getClass().getName()+":: getting triggered adc values with latency = "+readoutLatency+" for cellID = "+cellID); + /* + for(int k=0; k(); } + if(debug) + System.out.println(this.getClass().getName()+":: adding pulser-data strip hit for channel = "+channel+" at time = "+pulserHit.time); pulserHitQueues[channel].add(pulserHit); } @@ -306,7 +319,10 @@ public void process(EventHeader event) { if(hitQueues[channel] == null) { hitQueues[channel] = new PriorityQueue(); } - hitQueues[channel].add(stripHit); + if(debug) + System.out.println(this.getClass().getName()+":: adding simulated strip hit for channel = "+channel+" at time = "+stripHit.time); + + hitQueues[channel].add(stripHit); } // Hits older than a certain time frame should no longer @@ -628,9 +644,15 @@ protected Collection> getOnTriggerData(double triggerTime) List truthHits = new ArrayList(); List trueHitRelations = new ArrayList(); // Calculate time of first sample - double firstSample = Math.floor(((triggerTime + 256) - readoutLatency - readoutOffset) / HPSSVTConstants.SAMPLING_INTERVAL) + double firstSample = Math.floor(((triggerTime + triggerOffset) - readoutLatency - readoutOffset) / HPSSVTConstants.SAMPLING_INTERVAL) * HPSSVTConstants.SAMPLING_INTERVAL + readoutOffset; - + if(debug){ + System.out.println(this.getClass().getName()+":: trigger time = "+triggerTime+ + "; 
trigger offset = "+triggerOffset+"; readout latency = "+readoutLatency+ + "; readout offset = "+readoutOffset); + + System.out.println(this.getClass().getName()+":: svt first sample time for trigger = "+firstSample); + } List processedHits = new ArrayList(); for(SiSensor sensor : sensors) { @@ -693,11 +715,19 @@ protected Collection> getOnTriggerData(double triggerTime) // across all size samples. StringBuffer signalBuffer = new StringBuffer("\t\t\t\tSample Pulse :: ["); for(int sampleN = 0; sampleN < 6; sampleN++) { + //add the time offset to this. + // double sampleTime = firstSample + sampleN * HPSSVTConstants.SAMPLING_INTERVAL-timeOffset; double sampleTime = firstSample + sampleN * HPSSVTConstants.SAMPLING_INTERVAL; shape.setParameters(channel, (HpsSiSensor) sensor); double signalAtTime = hit.amplitude * shape.getAmplitudePeakNorm(sampleTime - hit.time); - totalContrib += signalAtTime; + + totalContrib += signalAtTime; signal[sampleN] += signalAtTime; + if(debug){ + System.out.println(this.getClass().getName()+":: making pulse: sample time = " + +sampleTime+"; hit time = "+hit.time); + System.out.println(this.getClass().getName()+":: signal from pulse @ time() = "+signalAtTime+"; total ADC = "+signal[sampleN]); + } meanNoise += ((HpsSiSensor) sensor).getNoise(channel, sampleN); signalBuffer.append(signalAtTime + " (" + sampleTime + ")"); @@ -736,6 +766,8 @@ protected Collection> getOnTriggerData(double triggerTime) // be passed through to readout. if(readoutCuts(hit)) { // Add the hit to the readout hits collection. + if(debug) + System.out.println(this.getClass().getName()+":: adding svt hit to triggered event"); hits.add(hit); // Associate the truth hits with the raw hit and // add them to the truth hits collection. 
diff --git a/digi/src/main/java/org/hps/digi/nospacing/CalDigiWithPulserNoSpacingReadoutDriver.java b/digi/src/main/java/org/hps/digi/nospacing/CalDigiWithPulserNoSpacingReadoutDriver.java deleted file mode 100644 index 9647269d1..000000000 --- a/digi/src/main/java/org/hps/digi/nospacing/CalDigiWithPulserNoSpacingReadoutDriver.java +++ /dev/null @@ -1,1899 +0,0 @@ -package org.hps.digi.nospacing; - -import static org.hps.recon.ecal.EcalUtils.fallTime; -import static org.hps.recon.ecal.EcalUtils.maxVolt; -import static org.hps.recon.ecal.EcalUtils.nBit; -import static org.hps.recon.ecal.EcalUtils.riseTime; - -import java.util.ArrayList; -import java.util.Collection; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; - -import org.hps.readout.ReadoutDriver; -import org.hps.readout.ReadoutDataManager; -import org.hps.readout.ReadoutTimestamp; -import org.hps.readout.util.DoubleRingBuffer; -import org.hps.readout.util.IntegerRingBuffer; -import org.hps.readout.util.ObjectRingBuffer; -import org.hps.readout.util.collection.LCIOCollection; -import org.hps.readout.util.collection.LCIOCollectionFactory; -import org.hps.readout.util.collection.TriggeredLCIOData; -import org.hps.recon.ecal.EcalUtils; -import org.hps.util.RandomGaussian; -import org.lcsim.event.CalorimeterHit; -import org.lcsim.event.EventHeader; -import org.lcsim.event.LCRelation; -import org.lcsim.event.MCParticle; -import org.lcsim.event.RawCalorimeterHit; -import org.lcsim.event.RawTrackerHit; -import org.lcsim.event.SimCalorimeterHit; -import org.lcsim.event.base.BaseCalorimeterHit; -import org.lcsim.event.base.BaseLCRelation; -import org.lcsim.event.base.BaseRawCalorimeterHit; -import org.lcsim.event.base.BaseRawTrackerHit; -import org.lcsim.event.base.BaseSimCalorimeterHit; -import org.lcsim.geometry.Detector; -import org.lcsim.geometry.compact.Subdetector; -import org.lcsim.lcio.LCIOConstants; - -/** - * Class 
DigitizationWithPulserDataMergingReadoutDriver performs digitization - * of truth hits from SLIC by converting them into emulated pulses and merges pulser data, - * and then performing pulse integration. The results are output in - * the form of {@link org.lcsim.event.RawCalorimeterHit - * RawCalorimeterHit} objects. - *

- * The truth hit information is retained by also producing an output - * collection of {@link org.lcsim.event.LCRelation LCRelation} - * objects linking the raw hits to the original {@link - * org.lcsim.event.SimCalorimeterHit SimCalorimeterHit} objects from - * which they were generated. - *

- * DigitizationReadoutDriver is itself abstract. It is - * designed with the intent to function for both the hodoscope and - * the calorimeter. As such, it requires its implementing classes to - * handle certain subdetector-specific tasks. - * - * @author Tongtong Cao - */ -public abstract class CalDigiWithPulserNoSpacingReadoutDriver extends ReadoutDriver { - - // ============================================================== - // ==== LCIO Collections ======================================== - // ============================================================== - - /** - * Specifies the name of the subdetector geometry object. - */ - private String geometryName = null; - /** - * The name of the input {@link org.lcsim.event.SimCalorimeterHit - * SimCalorimeterHit} truth hit collection from SLIC. - */ - private String truthHitCollectionName = null; - /** - * The name of the input {@link org.lcsim.event.RawTrackerHit - * RawTrackerHit} collection from pulser data. - */ - private String PulserDataCollectionName = null; - /** - * The name of the digitized output {@link - * org.lcsim.event.RawCalorimeterHit RawCalorimeterHit} - * collection. - */ - private String outputHitCollectionName = null; - /** - * The name of the {@link org.lcsim.event.LCRelation LCRelation} - * collection that links output raw hits to the SLIC truth hits - * from which they were generated. - */ - private String truthRelationsCollectionName = null; - /** - * The name of the {@link org.lcsim.event.LCRelation LCRelation} - * collection that links output raw hits to the SLIC truth hits - * from which they were generated. This collection is output for - * trigger path hits, and is never persisted. - */ - private String triggerTruthRelationsCollectionName = null; - /** - * The name of the collection which contains readout hits. The - * class type of this collection will vary based on which mode - * the simulation is set to emulate. 
- */ - private String readoutCollectionName = null; - - // ============================================================== - // ==== Driver Options ========================================== - // ============================================================== - - /** - * Indicates whether or not noise should be simulated when - * converting truth energy depositions to the voltage amplitudes. - */ - private boolean addNoise = true; - /** - * Defines the number of photoelectrons per MeV of truth energy - * for the purpose of noise calculation. - */ - private double pePerMeV = Double.NaN; - /** - * Defines a fixed gain to be used for all subdetector channels. - * A negative value will result in gains being pulled from the - * conditions database for the run instead. Units are in MeV/ADC. - */ - private double fixedGain = -1; - /** - * Defines the pulse shape to use when simulating detector truth - * energy deposition response. - */ - private PulseShape pulseShape = PulseShape.ThreePole; - /** - * Defines the pulse time parameter. This influences the shape of - * a pulse generated from truth energy depositions and will vary - * depending on the form of pulse selected. Units are in ns. - */ - private double tp = Double.NaN; - /** - * Defines the ADC threshold needed to initiate pulse integration - * for raw hit creation. - */ - protected int integrationThreshold = 18; - /** - * Defines the number of integration samples that should be - * included in the pulse integral from before the sample that - * exceeds the integration threshold. - */ - protected int numSamplesBefore = 5; - /** - * Defines the number of integration samples that should be - * included in the pulse integral from after the sample that - * exceeds the integration threshold. - * Threshold-crossing sample is part of NSA. - */ - protected int numSamplesAfter = 25; - /** - * The format in which readout hits should be output. 
- */ - private int mode = 1; - /** - * Specifies whether trigger path hit truth information should be - * included in the driver output. - */ - private boolean writeTriggerTruth = false; - /** - * Specifies whether readout path truth information should be - * included in the driver output. - */ - private boolean writeTruth = false; - - // ============================================================== - // ==== Driver Parameters ======================================= - // ============================================================== - - /** - * Defines the length in nanoseconds of a hardware sample. - */ - private static final double READOUT_PERIOD = 4.0; - /** - * Serves as an internal clock variable for the driver. This is - * used to track the number of clock-cycles (1 per {@link - * org.hps.readout.ecal.updated.DigitizationReadoutDriver#READOUT_PERIOD - * READOUT_PERIOD}). - */ - private int readoutCounter = 0; - /** - * A buffer for storing pulse amplitudes representing the signals - * from the preamplifiers. These are stored in units of Volts - * with no pedestal. One buffer exists for each subdetector - * channel. - */ - private Map voltageBufferMap = new HashMap(); - /** - * Buffers the truth information for each sample period so that - * truth relations can be retained upon readout. - */ - private Map> truthBufferMap = new HashMap>(); - /** - * A buffer for storing ADC values representing the converted - * voltage values from the voltage buffers. These are stored in - * units of ADC and include a pedestal. One buffer exists for - * each subdetector channel. - */ - private Map adcBufferMap = new HashMap(); - - /** - * Stores the subdetector geometry object. - */ - private D geometry = null; - /** - * Stores the total ADC sums for each subdetector channel that is - * currently undergoing integration. 
- */ - private Map channelIntegrationSumMap = new HashMap(); - /** - * Stores the total ADC sums for each subdetector channel that is - * currently undergoing integration. - */ - private Map> channelIntegrationTruthMap = new HashMap>(); - /** - * Stores the time at which integration began on a given channel. - * This is used to track when the integration period has ended. - */ - private Map channelIntegrationTimeMap = new HashMap(); - // TODO: Give this documentation. - private Map> channelIntegrationADCMap = new HashMap>(); - /** - * Defines the time offset of objects produced by this driver - * from the actual true time that they should appear. - */ - private double localTimeOffset = 0; - /** - * Stores the minimum length of that must pass before a new hit - * may be integrated on a given channel. - * Unit: clock-cycle - */ - private static final int CHANNEL_INTEGRATION_DEADTIME = 8; - //private static final int CHANNEL_INTEGRATION_DEADTIME = 0; - /** - * Defines the total time range around the trigger time in which - * hits are output into the readout LCIO file. The trigger time - * position within this range is determined by {@link - * org.hps.readout.ecal.updated.DigitizationReadoutDriver#readoutOffset - * readoutOffset}. - */ - protected int readoutWindow = 48; - /** - * Sets how far from the beginning of the readout window trigger - * time should occur. A value of x, for instance would result in - * a window that starts at triggerTime - x and - * extends for a total time readoutWindow. 
- */ - // private int readoutOffset = 0; - - private int readoutOffset = -12; - - /** - * Sets time window of ADC samples in pulser data - */ - protected int pulserDataWindow = 48; - - /** - * To make time alignment between Ecal and hodoscope detectors, samples of - * pulser data may need to be shifted according to readout window offset - * difference between Ecal and hodoscope - */ - private int pulserSamplesShift = 0; - - - private double debugEnergyThresh=0.25; //only print debug for hits>250 MeV - - private boolean debug_=false; - - /** - * Defines the LCSim collection data for the trigger hits that - * are produced by this driver when it is emulating Mode-1 or - * Mode-3. - */ - private LCIOCollection mode13HitCollectionParams; - /** - * Defines the LCSim collection data for the trigger hits that - * are produced by this driver when it is emulating Mode-7. - */ - private LCIOCollection mode7HitCollectionParams; - /** - * Defines the LCSim collection data that links SLIC truth hits - * to their corresponding simulated output hit. - */ - private LCIOCollection truthRelationsCollectionParams; - - /** - * Flag to point out that new integration could be started at a sample - * between CHANNEL_INTEGRATION_DEADTIME and numSamplesAfter - * for the case CHANNEL_INTEGRATION_DEADTIME is less than numSamplesAfter - */ - private Map flagStartNewIntegration = new HashMap<>(); - - /** - * Since new integration could happen between CHANNEL_INTEGRATION_DEADTIME and numSamplesAfter, - * integration time needs to be assigned as parameter of ReadoutDataManager.addData(). - * Global displacement is 0 for dependency. - */ - private double integrationTime = Double.NaN; - - - // ============================================================== - // ==== To Be Re-Worked ========================================= - // ============================================================== - // TODO: We should be able to define these based on the integration parameters. 
- private static final int BUFFER_LENGTH = 100; - private static final int PIPELINE_LENGTH = 2000; - - @Override - public void startOfData() { - // Validate that all the collection names are defined. - if(truthHitCollectionName == null || PulserDataCollectionName == null || outputHitCollectionName == null || truthRelationsCollectionName == null - || triggerTruthRelationsCollectionName == null || readoutCollectionName == null) { - throw new RuntimeException("One or more collection names is not defined!"); - } - - // Calculate the correct time offset. This is a function of - // the integration samples and the output delay. - // Threshold-crossing sample is part of NSA. - // localTimeOffset = 4 * numSamplesAfter; - localTimeOffset = 0; - - // Validate that a real mode was selected. - if(mode != 1 && mode != 3 && mode != 7) { - throw new IllegalArgumentException("Error: Mode " + mode + " is not a supported output mode."); - } - - // Add the driver dependencies. - addDependency(truthHitCollectionName); - addDependency(PulserDataCollectionName); - - // Define the LCSim collection parameters for this driver's - // output. Note: Since these are not persisted, the flags and - // readout name are probably not necessary. 
- LCIOCollectionFactory.setCollectionName(outputHitCollectionName); - LCIOCollectionFactory.setProductionDriver(this); - LCIOCollectionFactory.setFlags((0 + (1 << LCIOConstants.CHBIT_LONG) + (1 << LCIOConstants.RCHBIT_ID1))); - LCIOCollectionFactory.setReadoutName(truthHitCollectionName); - LCIOCollection hitCollectionParams = LCIOCollectionFactory.produceLCIOCollection(RawCalorimeterHit.class); - ReadoutDataManager.registerCollection(hitCollectionParams, false); - - LCIOCollectionFactory.setCollectionName(triggerTruthRelationsCollectionName); - LCIOCollectionFactory.setProductionDriver(this); - LCIOCollection triggerTruthCollectionParams = LCIOCollectionFactory.produceLCIOCollection(LCRelation.class); - ReadoutDataManager.registerCollection(triggerTruthCollectionParams, false); - - // Define the LCSim collection data for the on-trigger output. - LCIOCollectionFactory.setCollectionName(readoutCollectionName); - LCIOCollectionFactory.setProductionDriver(this); - mode13HitCollectionParams = LCIOCollectionFactory.produceLCIOCollection(RawTrackerHit.class); - - LCIOCollectionFactory.setCollectionName(readoutCollectionName); - LCIOCollectionFactory.setProductionDriver(this); - LCIOCollectionFactory.setFlags(1 << LCIOConstants.RCHBIT_TIME); - mode7HitCollectionParams = LCIOCollectionFactory.produceLCIOCollection(RawCalorimeterHit.class); - - LCIOCollectionFactory.setCollectionName(truthRelationsCollectionName); - LCIOCollectionFactory.setProductionDriver(this); - truthRelationsCollectionParams = LCIOCollectionFactory.produceLCIOCollection(LCRelation.class); - - // Run the superclass method. - super.startOfData(); - } - - @SuppressWarnings("unchecked") - @Override - public void detectorChanged(Detector detector) { - // Throw an error if the geometry name is not set. - if(geometryName == null) { - throw new RuntimeException("Subdetector name is not defined!"); - } - - // Get the readout name from the subdetector geometry data. 
- geometry = (D) detector.getSubdetector(geometryName); - - // Update the output LCIO collections data. - LCIOCollectionFactory.setReadoutName(geometry.getReadout().getName()); - mode13HitCollectionParams = LCIOCollectionFactory.cloneCollection(mode13HitCollectionParams); - LCIOCollectionFactory.setReadoutName(geometry.getReadout().getName()); - mode7HitCollectionParams = LCIOCollectionFactory.cloneCollection(mode7HitCollectionParams); - - // Reinstantiate the buffers. - resetBuffers(); - } - - @Override - public void process(EventHeader event) { - - /* - * Get current SLIC hits and current raw hits in pulser data. - */ - - // Get current SLIC hits. - Collection hits = ReadoutDataManager.getData(ReadoutDataManager.getCurrentTime(), ReadoutDataManager.getCurrentTime() + 2.0, - truthHitCollectionName, SimCalorimeterHit.class); - - // Get current raw hits in pulser data. - Collection rawHits = ReadoutDataManager.getData(ReadoutDataManager.getCurrentTime(), ReadoutDataManager.getCurrentTime() + 2.0, - PulserDataCollectionName, RawTrackerHit.class); - if(debug_)System.out.println("DigiReadout:: "+truthHitCollectionName +" local time = "+ReadoutDataManager.getCurrentTime()+" number of hits = "+hits.size()); - // Once an overlaid event is input, reset adcBufferMap to ensure that other overlaid events do not affect the current event. - if(hits.size()!=0 || rawHits.size()!=0) { - // Get the set of all possible channel IDs. - Set cells = getChannelIDs(); - - // Reset adcBufferMap. - for(Long cellID : cells) - adcBufferMap.get(cellID).setAll((int) Math.round(getPedestalConditions(cellID))); - } - - /* To merge MC data with pulser data, three different cases are handled separately. 
- * Case 1: If pulser data does not have a channel in MC data, directly buffer samples - * - * Case 2: If MC data does not have a channel in pulser data, - * 1) add noise into MC hits - * 2) convert MC hits into a window of ADC samples - * 3) add pedestal - * 4) buffer samples - * - * Case 3: If MC data has a channel that is also in pulser data, - * 1) convert MC hits into a window of ADC samples - * 2) merge with samples of pulser data - * 3) buffer merged samples - * - * MC hits are digitized into ADC samples with the same time window of pulser data. - * Before the time window, the window is extended with NSB ADC samples, and values of the ADC samples are set as pedestal. - * After the time window, the window is extended with NSA ADC samples, and values of the ADC samples are set as pedestal. - * The extension is allowed since enough empty events are inserted into neighbored overlaid events. - */ - - // Add the truth hits to the truth hit buffer. The buffer is - // only incremented when the ADC buffer is incremented, which - // is handled below. - // Save cell IDs of hits as keys in the MC hit Cell ID hash map, and set values as 1. - // The hash map is used to check if MC data has a channel that is also in pulser data. - Map hitCellIDMap = new HashMap(hits.size()); - for(SimCalorimeterHit hit : hits) { - if(debug_) - System.out.println(this.getClass().getName()+":: process:: sim hit energy = "+hit.getRawEnergy()); - // Store the truth data. - Long hitCellID = hit.getCellID(); // For Ecal, cell ID is geometry ID; For hodo, cell ID is channel ID after hodoscope preprocessing - - ObjectRingBuffer hitBuffer = truthBufferMap.get(hitCellID); - hitBuffer.addToCell(0, hit); - - // Save cell IDs of hits as keys in the hit Cell ID hash map, and set values as 1. - if(hitCellIDMap.get(hitCellID) == null) - hitCellIDMap.put(hitCellID,1); - } - - // handle pulser data: case 1. 
- // If cellID of a raw hit is not included by keys in the MC hit Cell ID hash map for MC hits, directly buffer ADC samples. - // If included, set value as 2 for the corresponding key in the MC hit Cell ID hash map. - // Save raw hits in the raw hit hash map, where keys are raw hit cell IDs and values are raw hits. - // The hash map is used for case 3 - Map rawHitsMap = new HashMap(rawHits.size()); - for(RawTrackerHit rawHit : rawHits) { - Long rawHitID = getID(rawHit); // For Ecal, ID is geometry ID; For hodo, ID is channel ID, which is converted from geometry ID. - if(hitCellIDMap.get(rawHitID) == null) { - // Get the ADC buffer for the channel. - IntegerRingBuffer adcBuffer = adcBufferMap.get(rawHitID); - - // Get ADC samples for the channel. - short[] adcSamples = rawHit.getADCValues(); - - // Length of ADC sample array should be equal to setup for time window of ADC samples - if(adcSamples.length != pulserDataWindow) - throw new RuntimeException("Error: time window of pulser data is not correctly set."); - - // Buffer ADC samples in pulser data - for(int i = 0; i < pulserDataWindow; i++) - adcBuffer.setValue(i - pulserSamplesShift, (int)adcSamples[i]); - } - else { - hitCellIDMap.put(rawHitID, 2); - rawHitsMap.put(rawHitID, rawHit); - } - } - - // handle MC hits: case 2 and case 3 - // In the MC hit Cell ID hash map, if value for cell ID of a MC hit is 1, handle the hit as case 2. - // If value for cell ID of a MC hit is 2, handle the hit as case 3. - for(SimCalorimeterHit hit : hits) { - Long hitCellID = hit.getCellID(); - // Check to see if the hit time seems valid. This is done - // by calculating the time of the next readout cycle in - // ns and subtracting the time of the current hit (with - // adjustment for simulation time passed) from it. If the - // hit would fall in a previous readout cycle, something - // is probably wrong. 
- // if(READOUT_PERIOD + readoutTime() - (ReadoutDataManager.getCurrentTime() + hit.getTime()) >= READOUT_PERIOD) { - if(READOUT_PERIOD - hit.getTime() >= READOUT_PERIOD) { - throw new RuntimeException("Error: Trying to add a hit to the analog pipeline, but the time seems incorrect."); - } - - // Get the ADC buffer for the channel. - IntegerRingBuffer adcBuffer = adcBufferMap.get(hitCellID); - - // Get the pedestal for the channel. - int pedestal = (int) Math.round(getPedestalConditions(hitCellID)); - - // Get the buffer for the current truth hit's channel. - DoubleRingBuffer voltageBuffer = voltageBufferMap.get(hitCellID); - - // Get the truth hit energy deposition. - double energyAmplitude = hit.getRawEnergy(); - if(energyAmplitude>debugEnergyThresh && debug_){ - System.out.println(this.getClass().getName()+":: process:: Putting sim hits in adcBuffer cellID = "+hitCellID); - System.out.println(this.getClass().getName()+":: process:: adding hits to adcBuffer cellID = "+hitCellID); - - System.out.println(this.getClass().getName()+":: process:: ReadoutDataManager Time = "+ReadoutDataManager.getCurrentTime()); - System.out.println(this.getClass().getName()+":: process:: hit time = "+hit.getTime()); - System.out.println(this.getClass().getName()+":: process:: readouttime() = "+readoutTime()); - - - System.out.println(this.getClass().getName()+":: process:: truth energy = "+energyAmplitude); - } - if(hitCellIDMap.get(hitCellID) == 1) { - // If noise should be added, calculate a random value for - // the noise and add it to the truth energy deposition. - if(addNoise) { - energyAmplitude += getAmplitudeFluctuation(hit); - if(energyAmplitude>debugEnergyThresh&&debug_) - System.out.println(this.getClass().getName()+":: process:: added noise to energy; new energy = "+energyAmplitude); - } - - // Simulate the pulse for each position in the preamp - // pulse buffer for the subdetector channel on which the - // hit occurred. 
- if(energyAmplitude>debugEnergyThresh&&debug_) - System.out.println(this.getClass().getName()+":: process:: making pulse"); - for(int i = 0; i < pulserDataWindow; i++) { - // Calculate the voltage deposition for the current - // buffer time. - //double voltageDeposition = energyAmplitude * pulseAmplitude((i + 1) * READOUT_PERIOD + readoutTime() - // - (ReadoutDataManager.getCurrentTime() + hit.getTime()) - getTimeShiftConditions(hitCellID), hitCellID); - - double voltageDeposition = energyAmplitude * pulseAmplitude((i + 1) * READOUT_PERIOD - - hit.getTime() - - getTimeShiftConditions(hitCellID) - , hitCellID); - - if(energyAmplitude>debugEnergyThresh&&debug_){ - System.out.println(this.getClass().getName()+":: process:: pulse sample i = "+i - +" local time = "+((i + 1) * READOUT_PERIOD - hit.getTime() - getTimeShiftConditions(hitCellID)) - +" pulse amplitude = "+pulseAmplitude((i + 1) * READOUT_PERIOD - - hit.getTime() - - getTimeShiftConditions(hitCellID) - , hitCellID)); - System.out.println(this.getClass().getName()+":: process:: writing digitized value for sample = "+i - +" voltage dep value = "+voltageDeposition); - } - // Increase the current buffer time's voltage value - // by the calculated amount. - voltageBuffer.addToCell(i, voltageDeposition); - - // Scale the current value of the preamplifier buffer - // to a 12-bit ADC value where the maximum represents - // a value of maxVolt. - double currentValue = voltageBuffer.getValue(i) * ((Math.pow(2, nBit) - 1) / maxVolt); - if(energyAmplitude>debugEnergyThresh&&debug_){ - System.out.println(this.getClass().getName()+":: process:: writing digitized value for sample = "+i - +" pre-noise digitized value = "+currentValue); - } - // If noise should be added, calculate a random value for - // the noise and add it to the ADC value. 
- if(addNoise) { - double sigma = getNoiseConditions(hitCellID); - currentValue += RandomGaussian.getGaussian(0, sigma); - } - if(energyAmplitude>debugEnergyThresh&&debug_) - System.out.println(this.getClass().getName()+":: process:: writing digitized value for sample = "+i - +" post-noise current value = "+currentValue); - // If noise should be added, calculate a random value for - // An ADC value is not allowed to exceed 4095. If a - // larger value is observed, 4096 (overflow) is given - // instead. (This corresponds to >2 Volts.) - int digitizedValue = Math.min((int) Math.round(pedestal + currentValue), (int) Math.pow(2, nBit)); - if(energyAmplitude>debugEnergyThresh&&debug_) - System.out.println(this.getClass().getName()+":: process:: writing digitized value for sample = "+i - +" digitized value = "+digitizedValue); - - // Write this value to the ADC buffer. - adcBuffer.setValue(i, digitizedValue); - } - } - - else { - // Get ADC samples for the channel. - short[] ADCSamples = rawHitsMap.get(hitCellID).getADCValues(); - - // Get digitized samples for MC hits - int[] digitizedValue = new int[pulserDataWindow]; - - // Simulate the pulse for each position in the preamp - // pulse buffer for the subdetector channel on which the - // hit occurred. - - for(int i = 0; i < pulserDataWindow; i++) { - // Calculate the voltage deposition for the current - // buffer time. - double voltageDeposition = energyAmplitude * pulseAmplitude((i + 1) * READOUT_PERIOD - - hit.getTime() - getTimeShiftConditions(hitCellID), hitCellID); - - // double voltageDeposition = energyAmplitude * pulseAmplitude((i + 1) * READOUT_PERIOD + readoutTime() - // - (ReadoutDataManager.getCurrentTime() + hit.getTime()) - getTimeShiftConditions(hitCellID), hitCellID); - - // Increase the current buffer time's voltage value - // by the calculated amount. 
- voltageBuffer.addToCell(i, voltageDeposition); - - // Scale the current value of the preamplifier buffer - // to a 12-bit ADC value where the maximum represents - // a value of maxVolt. - double currentValue = voltageBuffer.getValue(i) * ((Math.pow(2, nBit) - 1) / maxVolt); - - // An ADC value is not allowed to exceed 4095. If a - // larger value is observed, 4096 (overflow) is given - // instead. (This corresponds to >2 Volts.) - digitizedValue[i] = Math.min((int) Math.round(currentValue), (int) Math.pow(2, nBit)); - } - - // Write this value to the ADC buffer. - // If pulserSamplesShift is larger than 0, merged sample window is [-pulserSamplesShift, pulserDataWindow] - if(pulserSamplesShift >= 0) { - for(int i = -pulserSamplesShift; i < 0; i++) adcBuffer.setValue(i , (int)ADCSamples[i + pulserSamplesShift]); - for(int i = 0; i < pulserDataWindow - pulserSamplesShift; i++) adcBuffer.setValue(i, digitizedValue[i] + ADCSamples[i + pulserSamplesShift]); - for(int i = pulserDataWindow - pulserSamplesShift; i < pulserDataWindow; i++) adcBuffer.setValue(i, digitizedValue[i]); - } - // If pulserSamplesShift is less than 0, merged sample window is [0, -pulserSamplesShift + pulserDataWindow] - else { - for(int i = 0; i < -pulserSamplesShift; i++) adcBuffer.setValue(i, digitizedValue[i]); - for(int i = -pulserSamplesShift; i < pulserDataWindow; i++) adcBuffer.setValue(i, digitizedValue[i] + ADCSamples[i + pulserSamplesShift]); - for(int i = pulserDataWindow; i < pulserDataWindow - pulserSamplesShift; i++) adcBuffer.setValue(i, (int)ADCSamples[i + pulserSamplesShift]); - } - } - } - - /* - * Next step is to integrate hits from the pulses. Hit - * integration is only performed once per readout period. The - * readout period, defined by the hardware, is by default 4 - * nanoseconds. - */ - - // Check whether the appropriate amount of time has passed to - // perform another integration step. 
If so, create a list to - // contain any newly integrated hits and perform integration. - List newHits = null; - List newTruthRelations = null; - if(newHits == null) { newHits = new ArrayList(); } - if(newTruthRelations == null) { newTruthRelations = new ArrayList(); } - readoutCounter=0; - for(int i = 0; i < pulserDataWindow; i++){ - // System.out.println(this.getClass().getName()+":: looping over pulse data window readoutCounter = "+readoutCounter); - readHits(newHits, newTruthRelations); - readoutCounter++; - } - } - - // TODO: Document this. - private void readHits(List newHits, List newTruthRelations) { - // Perform hit integration as needed for each subdetector - // channel in the buffer map. - for(Long cellID : adcBufferMap.keySet()) { - // System.out.println("************** new channel ***************"); - // Get the ADC buffer for the channel. - IntegerRingBuffer adcBuffer = adcBufferMap.get(cellID); - - // Get the pedestal for the channel. - int pedestal = (int) Math.round(getPedestalConditions(cellID)); - - // Store the pedestal subtracted value so that it may - // be checked against the integration threshold. - int pedestalSubtractedValue = adcBuffer.getValue() - pedestal; - if(pedestalSubtractedValue > integrationThreshold && debug_){ - System.out.println(this.getClass().getName()+":: readHits:: Looping over adcBufferMap cellID = "+cellID); - System.out.println(this.getClass().getName()+":: readHits:: ped subtracted ADC counts = "+pedestalSubtractedValue); - } - // Get the total ADC value that has been integrated - // on this channel. - Integer sum = channelIntegrationSumMap.get(cellID); - if(pedestalSubtractedValue >integrationThreshold && debug_) - System.out.println(this.getClass().getName()+":: readHits:: sum = "+sum); - // If any readout hits exist on this channel, add the - // current ADC values to them. - - // If the ADC sum is undefined, then there is not an - // ongoing integration. 
If the pedestal subtracted - // value is also over the integration threshold, then - // integration should be initiated. - if(sum == null && pedestalSubtractedValue > integrationThreshold) { - // Store the current local time in units of - // events (4 ns). This will indicate when the - // integration started and, in turn, should end. - if(debug_)System.out.println(this.getClass().getName()+":: readHits:: Found a hit above threshold = "+cellID); - - channelIntegrationTimeMap.put(cellID, readoutCounter); - - // Integrate the ADC values for a number of - // samples defined by NSB and threshold - // crossing sample. - int sumBefore = 0; - for(int i = 0; i <= numSamplesBefore; i++) { - sumBefore += adcBuffer.getValue(-(numSamplesBefore - i)); - } - if(debug_)System.out.println(this.getClass().getName()+":: readHits:: sum before this sample = "+sumBefore); - // This will represent the total integral sum at - // the current point in time. Store it in the sum - // buffer so that it may be incremented later as - // additional samples are read. - channelIntegrationSumMap.put(cellID, sumBefore); - - // Collect and store truth information for trigger - // path hits. - channelIntegrationADCMap.put(cellID, new ArrayList()); - - // Get the truth information in the - // integration samples for this channel. - Set truthHits = new HashSet(); - for(int i = 0; i < numSamplesBefore + 4; i++) { - channelIntegrationADCMap.get(cellID).add(adcBuffer.getValue(-(numSamplesBefore - i))); - truthHits.addAll(truthBufferMap.get(cellID).getValue(-(numSamplesBefore - i))); - } - - // Store all the truth hits that occurred in - // the truth buffer in the integration period - // for this channel as well. These will be - // passed through the chain to allow for the - // accessing of truth information during the - // trigger simulation. - channelIntegrationTruthMap.put(cellID, truthHits); - } - - // If the integration sum is defined, then pulse - // integration is ongoing. 
- if(sum != null) { - if(debug_)System.out.println(this.getClass().getName()+":: readHits:: integration is ongoing..."); - // Three cases are treated separataly - // Case 1: CHANNEL_INTEGRATION_DEADTIME > numSamplesAfter - // Case 2: CHANNEL_INTEGRATION_DEADTIME == numSamplesAfter - // Case 3: CHANNEL_INTEGRATION_DEADTIME < numSamplesAfter - if(CHANNEL_INTEGRATION_DEADTIME > numSamplesAfter) { // Case 1 - if(debug_)System.out.println(this.getClass().getName()+":: readHits:: case 1: DEADTIME>NSA"); - //Continue integration until NSA, the threshold-crossing sample has been added before. - if (channelIntegrationTimeMap.get(cellID) + numSamplesAfter - 1 >= readoutCounter) { - channelIntegrationADCMap.get(cellID).add(adcBuffer.getValue(0)); - - // Add the new ADC sample. - channelIntegrationSumMap.put(cellID, sum + adcBuffer.getValue(0)); - - // Add the new truth information, if trigger - // path truth output is enabled. - if (writeTriggerTruth) { - channelIntegrationTruthMap.get(cellID).addAll(truthBufferMap.get(cellID).getValue(0)); - } - } - - // If integration is complete, a hit may be added - // to data manager. - else if (channelIntegrationTimeMap.get(cellID) + numSamplesAfter - 1 == readoutCounter - 1) {//At NSA + 1, hit is added into data manager - // Add a new calorimeter hit. - RawCalorimeterHit newHit = new BaseRawCalorimeterHit(cellID, sum, - 64 * channelIntegrationTimeMap.get(cellID)); - newHits.add(newHit); - // Cycle-clock for events is 2 ns, while cycle-clock for samples is 4 ns - integrationTime = channelIntegrationTimeMap.get(cellID) * 4 + 2; - // Add the truth relations for this hit, if - // trigger path truth is enabled. - if (writeTriggerTruth) { - Set truthHits = channelIntegrationTruthMap.get(cellID); - for (SimCalorimeterHit truthHit : truthHits) { - newTruthRelations.add(new BaseLCRelation(newHit, truthHit)); - } - } - } - - // Do not clear the channel for integration until deadtime has passed. 
- // The threshold-crossing sample counts as the first sample in the deadtime. - else if (channelIntegrationTimeMap.get(cellID) + CHANNEL_INTEGRATION_DEADTIME - 1 <= readoutCounter - - 1) { // No new integration until over deadtime - channelIntegrationSumMap.remove(cellID); - } - } // Case 1 ends - else if(CHANNEL_INTEGRATION_DEADTIME == numSamplesAfter){ // Case 2 - if(debug_)System.out.println(this.getClass().getName()+":: readHits:: case2: DEADTIME==NSA"); - // Continue integration until NSA, the threshold-crossing sample has been added before. - if (channelIntegrationTimeMap.get(cellID) + numSamplesAfter - 1 >= readoutCounter) { - channelIntegrationADCMap.get(cellID).add(adcBuffer.getValue(0)); - - // Add the new ADC sample. - channelIntegrationSumMap.put(cellID, sum + adcBuffer.getValue(0)); - - // Add the new truth information, if trigger - // path truth output is enabled. - if (writeTriggerTruth) { - channelIntegrationTruthMap.get(cellID).addAll(truthBufferMap.get(cellID).getValue(0)); - } - } - // If integration is complete, a hit may be added - // to data manager. - else if (channelIntegrationTimeMap.get(cellID) + numSamplesAfter - 1 == readoutCounter - 1) {//At NSA + 1, hit is added into data manager - // Add a new calorimeter hit. - RawCalorimeterHit newHit = new BaseRawCalorimeterHit(cellID, sum, - 64 * channelIntegrationTimeMap.get(cellID)); - newHits.add(newHit); - // Cycle-clock for events is 2 ns, while cycle-clock for samples is 4 ns - integrationTime = channelIntegrationTimeMap.get(cellID) * 4 + 2; - - // Add the truth relations for this hit, if - // trigger path truth is enabled. 
- if (writeTriggerTruth) { - Set truthHits = channelIntegrationTruthMap.get(cellID); - for (SimCalorimeterHit truthHit : truthHits) { - newTruthRelations.add(new BaseLCRelation(newHit, truthHit)); - } - } - channelIntegrationSumMap.remove(cellID); - } - } // Case 2 ends - else { // Case 3 - if(debug_)System.out.println(this.getClass().getName()+":: readHits:: case 3: DEADTIME<NSA"); - if (channelIntegrationTimeMap.get(cellID) + CHANNEL_INTEGRATION_DEADTIME - 1 >= readoutCounter) { - if(debug_)System.out.println(this.getClass().getName()+":: readHits:: case 3: time + deadtime - 1 >= readoutCounter"+(channelIntegrationTimeMap.get(cellID) + CHANNEL_INTEGRATION_DEADTIME - 1)+">="+readoutCounter+"....just keep integrating "+cellID); - // Continue integration until CHANNEL_INTEGRATION_DEADTIME - channelIntegrationADCMap.get(cellID).add(adcBuffer.getValue(0)); - - // Add the new ADC sample. - channelIntegrationSumMap.put(cellID, sum + adcBuffer.getValue(0)); - - // Add the new truth information, if trigger - // path truth output is enabled. - if (writeTriggerTruth) { - channelIntegrationTruthMap.get(cellID).addAll(truthBufferMap.get(cellID).getValue(0)); - } - - // If sample at the end of deadtime is less than threshold, new integration could be started from next sample - if(channelIntegrationTimeMap.get(cellID) + CHANNEL_INTEGRATION_DEADTIME == readoutCounter && pedestalSubtractedValue <= integrationThreshold){ - if(debug_)System.out.println(this.getClass().getName()+":: readHits:: case 3: time + deadtime - 1 >= readoutCounter ... 
at deadtime limit and below threshold, setting new integration flag to true"); - flagStartNewIntegration.put(cellID, true); - } - } - else if (channelIntegrationTimeMap.get(cellID) + numSamplesAfter - 1 >= readoutCounter) { - if(debug_)System.out.println(this.getClass().getName()+":: readHits:: case 3: time + NSA - 1 >= readoutCounter"+(channelIntegrationTimeMap.get(cellID) + numSamplesAfter - 1)+">="+readoutCounter+"....decide what to do "+cellID); - if(flagStartNewIntegration.get(cellID) == true) { // Flag for previous sample is true - if(debug_)System.out.println(this.getClass().getName()+":: readHits:: case 3: new integration flag is true; starting new integration "+cellID); - if(pedestalSubtractedValue <= integrationThreshold) { // If sample is less than threshold, then do not start new integration - channelIntegrationADCMap.get(cellID).add(adcBuffer.getValue(0)); - - // Add the new ADC sample. - channelIntegrationSumMap.put(cellID, sum + adcBuffer.getValue(0)); - - // Add the new truth information, if trigger - // path truth output is enabled. - if (writeTriggerTruth) { - channelIntegrationTruthMap.get(cellID).addAll(truthBufferMap.get(cellID).getValue(0)); - } - } - else { // if sample is larger than threshold, a hit is added into data manager and start new integration - // Add a new calorimeter hit. - if(debug_)System.out.println(this.getClass().getName()+":: readHits:: case 3: making new hit after new integration flag is true because sample is over threshold and new integration is starting!!! "+cellID); - RawCalorimeterHit newHit = new BaseRawCalorimeterHit(cellID, sum, - 64 * channelIntegrationTimeMap.get(cellID)); - newHits.add(newHit); - integrationTime = channelIntegrationTimeMap.get(cellID) * 4 + 2; - - // Add the truth relations for this hit, if - // trigger path truth is enabled. 
- if (writeTriggerTruth) { - Set truthHits = channelIntegrationTruthMap.get(cellID); - for (SimCalorimeterHit truthHit : truthHits) { - newTruthRelations.add(new BaseLCRelation(newHit, truthHit)); - } - } - - //Start new integration - channelIntegrationTimeMap.put(cellID, readoutCounter); - flagStartNewIntegration.put(cellID, false); - - // Integrate the ADC values for a number of - // samples defined by NSB from before threshold - // crossing. Note that this stops one sample - // before the current sample. This current sample - // is handled in the subsequent code block. - int sumBefore = 0; - for(int i = 0; i <= numSamplesBefore; i++) { - sumBefore += adcBuffer.getValue(-(numSamplesBefore - i)); - } - - // This will represent the total integral sum at - // the current point in time. Store it in the sum - // buffer so that it may be incremented later as - // additional samples are read. - channelIntegrationSumMap.put(cellID, sumBefore); - - // Collect and store truth information for trigger - // path hits. - channelIntegrationADCMap.put(cellID, new ArrayList()); - - // Get the truth information in the - // integration samples for this channel. - Set truthHits = new HashSet(); - for(int i = 0; i < numSamplesBefore + 4; i++) { - channelIntegrationADCMap.get(cellID).add(adcBuffer.getValue(-(numSamplesBefore - i))); - truthHits.addAll(truthBufferMap.get(cellID).getValue(-(numSamplesBefore - i))); - } - - // Store all the truth hits that occurred in - // the truth buffer in the integration period - // for this channel as well. These will be - // passed through the chain to allow for the - // accessing of truth information during the - // trigger simulation. 
- channelIntegrationTruthMap.put(cellID, truthHits); - } - } - else { // Flag for previous sample is false - if(debug_)System.out.println(this.getClass().getName()+":: readHits:: case 3: new integration flag is false; just add new sample "+cellID); - channelIntegrationADCMap.get(cellID).add(adcBuffer.getValue(0)); - - // Add the new ADC sample. - channelIntegrationSumMap.put(cellID, sum + adcBuffer.getValue(0)); - if(debug_)System.out.println(this.getClass().getName()+":: readHits:: case 3: new integration flag is false: integration sum = "+(sum + adcBuffer.getValue(0))); - // Add the new truth information, if trigger - // path truth output is enabled. - if (writeTriggerTruth) { - channelIntegrationTruthMap.get(cellID).addAll(truthBufferMap.get(cellID).getValue(0)); - } - if(pedestalSubtractedValue <= integrationThreshold){ - if(debug_)System.out.println(this.getClass().getName()+":: readHits:: case 3: new integration flag is false: went below threshold, setting flag to true"); - flagStartNewIntegration.put(cellID, true); - } - } - } - else if (channelIntegrationTimeMap.get(cellID) + numSamplesAfter - 1 == readoutCounter - 1) {//If reach NSA + 1, hit is added into data manager, and flag is set as false - if(debug_)System.out.println(this.getClass().getName()+":: readHits:: case 3: reached NSA + 1; adding new hit "+cellID); - // Add a new calorimeter hit. - RawCalorimeterHit newHit = new BaseRawCalorimeterHit(cellID, sum, - 64 * channelIntegrationTimeMap.get(cellID)); - newHits.add(newHit); - integrationTime = channelIntegrationTimeMap.get(cellID) * 4 + 2; - - // Add the truth relations for this hit, if - // trigger path truth is enabled. 
- if (writeTriggerTruth) { - Set truthHits = channelIntegrationTruthMap.get(cellID); - for (SimCalorimeterHit truthHit : truthHits) { - newTruthRelations.add(new BaseLCRelation(newHit, truthHit)); - } - } - channelIntegrationSumMap.remove(cellID); - flagStartNewIntegration.put(cellID, false); - } - } // Case 3 ends - } - - // Step to the next entry in the adc buffer. - adcBuffer.stepForward(); - - // Step to the next entry in the voltage buffer. - if(voltageBufferMap.get(cellID) != null) { // A channel could be from pulser data, while MC data has no such channel. - DoubleRingBuffer voltageBuffer = voltageBufferMap.get(cellID); - voltageBuffer.clearValue(); - voltageBuffer.stepForward(); - } - - // Step the truth buffer for this channel forward. - // The new cell should be cleared of any old values. - if(truthBufferMap.get(cellID) != null) { // A channel could be from pulser data, while MC data has no such channel. - truthBufferMap.get(cellID).stepForward(); - truthBufferMap.get(cellID).clearValue(); - } - } - - // Write the trigger path output data to the readout data - // manager. Truth data is optional. - - if(debug_)System.out.println("DigiReadout:: "+ outputHitCollectionName+" local time = "+(ReadoutDataManager.getCurrentTime()+readoutTime())+" adding trigger hits = "+newHits.size()); - ReadoutDataManager.addData(outputHitCollectionName, ReadoutDataManager.getCurrentTime()+readoutTime(), newHits, RawCalorimeterHit.class); - newHits.clear(); //remove newHits since we've already put it in data manager - if(writeTriggerTruth) { - ReadoutDataManager.addData(triggerTruthRelationsCollectionName, integrationTime, newTruthRelations, LCRelation.class); - newTruthRelations.clear(); - } - - } - - /** - * Finds all root particles associated with the interactions that - * created the argument particle. - * @param particle - The particle. 
- * @return Returns a {@link java.util.List List} containing each - * particle object in the argument particle's particle tree which - * has no parent particle. - */ - private static final List getRootParticleList(MCParticle particle) { - // If the particle has no parents, it should be added to the - // list and the list returned. - if(particle.getParents().isEmpty()) { - List list = new ArrayList(1); - list.add(particle); - return list; - } - - // If there is only one parent, just return the results from - // that parent. - else if(particle.getParents().size() == 1) { - return getRootParticleList(particle.getParents().get(0)); - } - - // Otherwise, run the method on each parent particle and - // return the results from that instead. - else { - // Store the parent particle roots. - List list = new ArrayList(); - - // Get the root particles for each parent and add them to - // the list. - for(MCParticle parent : particle.getParents()) { - List parentParticles = getRootParticleList(parent); - list.addAll(parentParticles); - } - - // Return the compiled particle list. - return list; - } - } - - /** - * Flattens the particle tree to a set containing both the root - * particle and any particles that are descended from it. - * @param root - The root of the particle tree. - * @return Returns a set containing the argument particle and all - * of its descendants. - */ - private static final Set getParticleTreeAsSet(MCParticle root) { - // Create a set to store the particle tree. - Set particleSet = new HashSet(); - - // Add the root particle to the tree, and then recursively - // add any daughter particles to the tree. - particleSet.add(root); - addDaughtersToSet(root, particleSet); - - // Return the particle set. - return particleSet; - } - - /** - * Adds all the daughter particles of the argument to the set. - * Daughters of each daughter particle are then recursively added - * to the set as well. - * @param particle - The particle to add. 
- * @param set - The set to which to add the particle. - */ - private static final void addDaughtersToSet(MCParticle particle, Set set) { - // Add each daughter particle to the set, and recursively add - // its daughters as well. - for(MCParticle daughter : particle.getDaughters()) { - set.add(daughter); - addDaughtersToSet(daughter, set); - } - } - - /** - * Gets a {@link java.util.Set Set} containing all valid channel - * IDs for the relevant subdetector geometry. - * @return Returns a Set containing all possible - * channel IDs. - */ - protected abstract Set getChannelIDs(); - - /** - * Gets a channel ID through {@link org.lcsim.event.RawTrackerHit RawTrackerHit} - * @return Returns a ID. Return a geometry ID for Ecal, while return a channel ID for hodoscope - */ - protected abstract Long getID(RawTrackerHit hit); - - /** - * Gets the gain for the indicated subdetector channel. - * @param channelID - The channel ID. - * @return Returns the value of the gain in units of ADC/MeV as a - * double. - */ - protected abstract double getGainConditions(long channelID); - - /** - * Gets the noise sigma for the indicated subdetector channel. - * @param channelID - The channel ID. - * @return Returns the value of the noise sigma as a - * double. - */ - protected abstract double getNoiseConditions(long channelID); - - /** - * Gets the int flag used to denote the appropriate - * subdetector in relation to a readout timestamp. - * @return Returns the timestamp flag as an int. - */ - protected abstract int getTimestampFlag(); - - /** - * Generate photoelectron/amplification noise for a pulse's amplitude. - * @param hit - The hit for which to generate a fluctuation. - * @return Returns a fluctuation in units GeV. 
- */ - protected double getAmplitudeFluctuation(CalorimeterHit hit) { - double sigma = Math.sqrt(hit.getRawEnergy() * EcalUtils.MeV / pePerMeV); - return RandomGaussian.getGaussian(0, sigma); - } - - @Override - protected Collection> getOnTriggerData(double triggerTime) { - // Create a list to store the extra collections. - List> collectionsList = null; - if(writeTruth) { - collectionsList = new ArrayList>(5); - } else { - collectionsList = new ArrayList>(2); - } - - // Readout drivers need to produce readout timestamps to - // specify when they occurred in terms of simulation time. - // The readout timestamp for the subdetector data should be - // defined as the start simulation time of the ADC buffer. - ReadoutTimestamp timestamp = new ReadoutTimestamp(getTimestampFlag(), triggerTime - (readoutOffset * 4) + 4); - - // Make the readout timestamp collection parameters object. - LCIOCollectionFactory.setCollectionName(ReadoutTimestamp.collectionName); - LCIOCollection timestampCollection = LCIOCollectionFactory.produceLCIOCollection(ReadoutTimestamp.class); - TriggeredLCIOData timestampData = new TriggeredLCIOData(timestampCollection); - timestampData.getData().add(timestamp); - collectionsList.add(timestampData); - - // Instantiate some lists to store truth data, if truth is to - // be output. - List triggerTruthHits = null; - List triggerTruthRelations = null; - if(writeTruth) { - triggerTruthHits = new ArrayList(); - triggerTruthRelations = new ArrayList(); - } - - // Get the appropriate collection of readout hits and output - // them to the readout data manager. 
- if(debug_)System.out.println(this.getClass().getName()+":: getting mode = "+mode+" hits on trigger time = "+triggerTime); - if(mode == 7) { - List readoutHits = getMode7Hits(triggerTime); - TriggeredLCIOData readoutData = new TriggeredLCIOData(mode7HitCollectionParams); - readoutData.getData().addAll(readoutHits); - collectionsList.add(readoutData); - } else { - List readoutHits = null; - if(mode == 1) { readoutHits = getMode1Hits(triggerTime); } - else { readoutHits = getMode3Hits(triggerTime); } - TriggeredLCIOData readoutData = new TriggeredLCIOData(mode13HitCollectionParams); - readoutData.getData().addAll(readoutHits); - collectionsList.add(readoutData); - - // FIXME: Truth information is currently only supported for Mode-1 operation. - if(writeTruth && mode == 1) { - for(RawTrackerHit hit : readoutHits) { - Collection truthHits = getTriggerTruthValues(hit.getCellID(), triggerTime); - triggerTruthHits.addAll(truthHits); - for(CalorimeterHit truthHit : truthHits) { - triggerTruthRelations.add(new BaseLCRelation(hit, truthHit)); - } - } - } - } - - // Add the truth collections if they exist. - if(writeTruth) { - // Add the truth hits to the output collection. - LCIOCollection truthHitCollection = ReadoutDataManager.getCollectionParameters(truthHitCollectionName, SimCalorimeterHit.class); - TriggeredLCIOData truthData = new TriggeredLCIOData(truthHitCollection); - truthData.getData().addAll(triggerTruthHits); - collectionsList.add(truthData); - - // MC particles need to be extracted from the truth hits - // and included in the readout data to ensure that the - // full truth chain is available. 
- Set truthParticles = new java.util.HashSet(); - for(SimCalorimeterHit simHit : triggerTruthHits) { - for(int i = 0; i < simHit.getMCParticleCount(); i++) { - List rootParticles = getRootParticleList(simHit.getMCParticle(i)); - for(MCParticle rootParticle : rootParticles) { - truthParticles.addAll(getParticleTreeAsSet(rootParticle)); - } - } - } - - // Create the truth MC particle collection. - LCIOCollection truthParticleCollection = ReadoutDataManager.getCollectionParameters("MCParticle", MCParticle.class); - TriggeredLCIOData truthParticleData = new TriggeredLCIOData(truthParticleCollection); - truthParticleData.getData().addAll(truthParticles); - collectionsList.add(truthParticleData); - - // Add the truth relations to the output data. - TriggeredLCIOData truthRelations = new TriggeredLCIOData(truthRelationsCollectionParams); - truthRelations.getData().addAll(triggerTruthRelations); - collectionsList.add(truthRelations); - } - - // Return the extra trigger collections. - return collectionsList; - } - - /** - * Gets the pedestal for the indicated subdetector channel. - * @param channelID - The channel ID. - * @return Returns the value of the pedestal in units of ADC as a - * double. - */ - protected abstract double getPedestalConditions(long channelID); - - - @Override - protected boolean isPersistent() { - throw new UnsupportedOperationException(); - } - - @Override - protected double getReadoutWindowAfter() { - throw new UnsupportedOperationException(); - } - - @Override - protected double getReadoutWindowBefore() { - throw new UnsupportedOperationException(); - } - - @Override - protected double getTimeDisplacement() { - return localTimeOffset; - } - - @Override - protected double getTimeNeededForLocalOutput() { - return (readoutWindow - readoutOffset) * 4.0; - } - - /** - * Gets the time shift for the indicated subdetector channel. - * @param channelID - The channel ID. - * @return Returns the value of the time shift in units of ns as - * a double. 
- */ - protected abstract double getTimeShiftConditions(long channelID); - - /** - * Gets the subdetector geometry object. - * @return Returns the subdetector geometry object. This will be - * an object of parameterized type D, which will is - * a subclass of {@link org.lcsim.geometry.compact.Subdetector - * Subdetector}. - */ - protected D getSubdetector() { - return geometry; - } - - /** - * Clones an object of type {@link org.lcsim.event.CalorimeterHit - * CalorimeterHit} and returns a copy that is shifted in time by - * the specified amount. - * @param hit - The hit to clone. - * @param newTime - The new time for the hit. - * @return Returns a time-shifted hit as an object of type {@link - * org.lcsim.event.CalorimeterHit CalorimeterHit}, unless the - * input hit was a {@link org.lcsim.event.SimCalorimeterHit - * SimCalorimeterHit} object, in which case the truth information - * will be retained. - */ - private static final CalorimeterHit cloneHitToTime(CalorimeterHit hit, double newTime) { - if(hit instanceof SimCalorimeterHit) { - // Cast the hit to a simulated calorimeter hit. - SimCalorimeterHit simHit = (SimCalorimeterHit) hit; - - // Create the necessary data objects to clone the - // hit. - int[] pdgs = new int[simHit.getMCParticleCount()]; - float[] times = new float[simHit.getMCParticleCount()]; - float[] energies = new float[simHit.getMCParticleCount()]; - Object[] particles = new Object[simHit.getMCParticleCount()]; - for(int i = 0; i < simHit.getMCParticleCount(); i++) { - particles[i] = simHit.getMCParticle(i); - pdgs[i] = simHit.getMCParticle(i).getPDGID(); - - // Note -- Despite returning the value for these - // methods as a double, they are actually stored - // internally as floats, so this case is always safe. - // Note -- Hit times are calculated based on the time - // of each of the contributing truth particles. This - // means that we have to give a fake truth time to - // actually get the correct hit time. 
- times[i] = (float) newTime; - energies[i] = (float) simHit.getContributedEnergy(i); - } - - // Create the new hit and shift its time position. - BaseSimCalorimeterHit cloneHit = new BaseSimCalorimeterHit(simHit.getCellID(), simHit.getRawEnergy(), newTime, - particles, energies, times, pdgs, simHit.getMetaData()); - - // Return the hit. - return cloneHit; - } else { - return new BaseCalorimeterHit(hit.getRawEnergy(), hit.getCorrectedEnergy(), hit.getEnergyError(), newTime, - hit.getCellID(), hit.getPositionVec(), hit.getType(), hit.getMetaData()); - } - } - - /** - * Gets the value of the pulse-shape Guassian function for the - * given parameters. - * @param t - * @param sig - * @return Returns the value of the function as a - * double. - */ - private static final double funcGaus(double t, double sig) { - return Math.exp(-t * t / (2 * sig * sig)); - } - - /** - * Generates the hits which should be output for a given trigger - * time in Mode-1 format. - * @param triggerTime - The trigger time. - * @return Returns the readout hits for the given trigger time as - * Mode-1 hits. - */ - private List getMode1Hits(double triggerTime) { - // Create a list to store the Mode-1 hits. - List hits = new ArrayList(); - if(debug_)System.out.println(this.getClass().getName()+":: getting mode1Hits for trigger time = "+triggerTime+" and readout window = "+readoutWindow); - // Iterate over each channel. - for(Long cellID : adcBufferMap.keySet()) { - // Get the ADC values at the time of the trigger. - short[] adcValues = getTriggerADCValues(cellID, triggerTime); - if(debug_){ - Collection simHits=getTriggerTruthValues(cellID, triggerTime); - if(simHits.size()==0) - System.out.println(this.getClass().getName()+":: no sim cal hits in this channel"); - else{ - for( SimCalorimeterHit hit: simHits) - System.out.println(this.getClass().getName()+":: sim cal hit in this channel with energy = "+hit.getRawEnergy()); - } - } - - // Iterate across the ADC values. 
If the ADC value is - // sufficiently high to produce a hit, then it should be - // written out. - boolean isAboveThreshold = false; - for(int i = 0; i < adcValues.length; i++) { - // Check that there is a threshold-crossing at some - // point in the ADC buffer. - if(adcValues[i] > getPedestalConditions(cellID) + integrationThreshold) { - if(debug_)System.out.println(this.getClass().getName()+":: found an adc value above threshold for cellID = "+cellID); - isAboveThreshold = true; - break; - } - } - - // If so, create a new hit and add it to the list. - if(isAboveThreshold) { - hits.add(new BaseRawTrackerHit(cellID, 0, adcValues)); - } - } - - // Return the hits. - return hits; - } - - /** - * Generates the hits which should be output for a given trigger - * time in Mode-3 format. - * @param triggerTime - The trigger time. - * @return Returns the readout hits for the given trigger time as - * Mode-3 hits. - */ - private List getMode3Hits(double triggerTime) { - // Create a list to store the Mode-3 hits. - List hits = new ArrayList(); - - // Iterate across the ADC values and extract Mode-3 hits. 
- for(Long cellID : adcBufferMap.keySet()) { - int pointerOffset = 0; - int numSamplesToRead = 0; - int thresholdCrossing = 0; - short[] adcValues = null; - short[] window = getTriggerADCValues(cellID, triggerTime); - - for(int i = 0; i < ReadoutDataManager.getReadoutWindow(); i++) { - if(numSamplesToRead != 0) { - adcValues[adcValues.length - numSamplesToRead] = window[i - pointerOffset]; - numSamplesToRead--; - if (numSamplesToRead == 0) { - hits.add(new BaseRawTrackerHit(cellID, thresholdCrossing, adcValues)); - } - } else if ((i == 0 || window[i - 1] <= getPedestalConditions(cellID) + integrationThreshold) && window[i] - > getPedestalConditions(cellID) + integrationThreshold) { - thresholdCrossing = i; - pointerOffset = Math.min(numSamplesBefore, i); - numSamplesToRead = pointerOffset + Math.min(numSamplesAfter, ReadoutDataManager.getReadoutWindow() - i - pointerOffset - 1); - adcValues = new short[numSamplesToRead]; - } - } - } - - // Return the hits. - return hits; - } - - /** - * Generates the hits which should be output for a given trigger - * time in Mode-7 format. - * @param triggerTime - The trigger time. - * @return Returns the readout hits for the given trigger time as - * Mode-7 hits. - */ - private List getMode7Hits(double triggerTime) { - // Create a list to store the Mode-7 hits. - List hits = new ArrayList(); - - // Iterate across the ADC values and extract Mode-7 hits. - for(Long cellID : adcBufferMap.keySet()) { - int adcSum = 0; - int pointerOffset = 0; - int numSamplesToRead = 0; - int thresholdCrossing = 0; - short[] window = getTriggerADCValues(cellID, triggerTime); - - // Generate Mode-7 hits. 
- if(window != null) { - for(int i = 0; i < ReadoutDataManager.getReadoutWindow(); i++) { - if (numSamplesToRead != 0) { - adcSum += window[i - pointerOffset]; - numSamplesToRead--; - if(numSamplesToRead == 0) { - hits.add(new BaseRawCalorimeterHit(cellID, adcSum, 64 * thresholdCrossing)); - } - } else if((i == 0 || window[i - 1] <= getPedestalConditions(cellID) + integrationThreshold) - && window[i] > getPedestalConditions(cellID) + integrationThreshold) { - thresholdCrossing = i; - pointerOffset = Math.min(numSamplesBefore, i); - numSamplesToRead = pointerOffset + Math.min(numSamplesAfter, ReadoutDataManager.getReadoutWindow() - i - pointerOffset - 1); - adcSum = 0; - } - } - } - } - - // Return the hits. - return hits; - } - - private int getReadoutLatency(double triggerTime) { - return ((int) ((ReadoutDataManager.getCurrentTime() - triggerTime) / 4.0)) + readoutOffset; - } - - /** - * Gets the ADC values for the trigger readout window for the - * requested cell ID and returns them as a short - * primitive array. - * @param cellID - The ID for the channel of the requested ADC - * value array. - * @param triggerTime - The time of the trigger to be written. - * @return Returns the ADC values in a time range equal to the - * readout window positioned around the trigger time as array of - * short primitives. - */ - private short[] getTriggerADCValues(long cellID, double triggerTime) { - // Calculate the offset between the current position and the - // trigger time. - int readoutLatency = getReadoutLatency(triggerTime); - - // Get the ADC pipeline. - IntegerRingBuffer pipeline = adcBufferMap.get(cellID); - - // Extract the ADC values for the requested channel. 
- short[] adcValues = new short[readoutWindow]; - if(debug_)System.out.println(this.getClass().getName()+":: getTriggerADCValues:: latency = "+readoutLatency); - for(int i = 0; i < readoutWindow; i++) { - adcValues[i] = (short) pipeline.getValue(-(readoutLatency - i - 1)).intValue(); - if(debug_) - System.out.println(this.getClass().getName()+":: getTriggerADCValues:: "+" pipeline index = "+ (-(readoutLatency - i - 1)) - +" adcValue["+i+"] = "+adcValues[i]); - } - - // Return the result. - return adcValues; - } - - /** - * Gets a list of all truth hits that occurred in the ADC output - * window around a given trigger time from the truth buffer. - * @param cellID - The channel ID. - * @param triggerTime - The trigger time. - * @return Returns all truth hits that occurred within the ADC - * readout window around the trigger time for the specified - * channel. - */ - private Collection getTriggerTruthValues(long cellID, double triggerTime) { - // Calculate the offset between the current position and the - // trigger time. - int readoutLatency = getReadoutLatency(triggerTime); - - // Get the truth pipeline. - ObjectRingBuffer pipeline = truthBufferMap.get(cellID); - - // Extract the truth for the requested channel. Note that one - // extra sample is included over the range of ADC samples as - // sometimes, the truth hit occurs a little earlier than may - // be expected due to a delay from pulse propagation. - double baseHitTime = 0; - List channelHits = new ArrayList(); - for(int i = 0; i < readoutWindow + 4; i++) { - // Hit times should be specified with respect to the - // start of the readout window. - for(SimCalorimeterHit hit : pipeline.getValue(-(readoutLatency - i))) { - channelHits.add((SimCalorimeterHit) cloneHitToTime(hit, baseHitTime)); - } - - // Increment the base hit time. - baseHitTime += 4.0; - } - - // Return the result. - return channelHits; - } - - /** - * Returns pulse amplitude at the given time (relative to hit time). Gain is - * applied. 
- * - * @param time Units of ns. Relative to hit time (negative=before the start - * of the pulse). - * @param cellID Crystal ID as returned by hit.getCellID(). - * @return Amplitude, units of volts/GeV. - */ - private double pulseAmplitude(double time, long cellID) { - //normalization constant from cal gain (MeV/integral bit) to amplitude gain (amplitude bit/GeV) - // Determine the gain. Gain may either be fixed across all - // channels, or be obtained from the conditions database - // depending on the behavior defined in the steering file. - // The gain should also be normalized. - double gain; - if(fixedGain > 0) { - gain = READOUT_PERIOD / (fixedGain * EcalUtils.MeV * ((Math.pow(2, nBit) - 1) / maxVolt)); - } else { - gain = READOUT_PERIOD / (getGainConditions(cellID) * EcalUtils.MeV * ((Math.pow(2, nBit) - 1) / maxVolt)); - } - - // Calculate the correct pulse amplitude and return it. - return gain * pulseAmplitude(time, pulseShape, tp); - } - - /** - * Calculates the amplitude of a pulse at the given time, where - * the time is relative to the hit time, and for a given pulse - * shape. - * @param time - The time in the pulse. This is in units of ns - * and is relative to the hit time. A negative value represents - * the pulse shape before the hit occurs. - * @param shape - The type of pulse for which the calculation is - * to be performed. - * @param shapingTime - A fitting parameter that influences the - * shape of the pulse. - * @return Returns the pulse amplitude in units of inverse ns. - * The amplitude is normalized so that the pulse integral is one. - */ - private static final double pulseAmplitude(double time, PulseShape shape, double shapingTime) { - // There can not be a pulse response from a hit that has not - // occurred yet, so any time before zero must produce a pulse - // amplitude of zero as well. - if(time <= 0.0) { - return 0.0; - } - - // Perform the calculation appropriate to the specified pulse - // shape. 
- switch (shape) { - case CRRC: - // Peak Location: tp - // Peak Value: 1/(tp * e) - return ((time / (shapingTime * shapingTime)) * Math.exp(-time / shapingTime)); - case DoubleGaussian: - // According to measurements, the output signal can - // be fitted by two Gaussians: one for the rise of - // the signal and one for the fall. - // Peak Location: 3 * riseTime - // Peak Value: 1/norm - double norm = ((riseTime + fallTime) / 2) * Math.sqrt(2 * Math.PI); //to ensure the total integral is equal to 1: = 33.8 - return funcGaus(time - 3 * riseTime, (time < 3 * riseTime) ? riseTime : fallTime) / norm; - case ThreePole: - // Peak Location: 2 * tp - // Peak Value: 2/(tp * e^2) - return ((time * time / (2 * shapingTime * shapingTime * shapingTime)) * Math.exp(-time / shapingTime)); - default: - return 0.0; - } - } - - /** - * Gets the local time for this driver. - * @return Returns the local time for this driver. - */ - private double readoutTime() { - return readoutCounter * READOUT_PERIOD; - //return ReadoutDataManager.getCurrentTime(); - } - - /** - * Resets the driver buffers to their default values. - * @return Returns true if the buffers were reset - * successfully, and false if they were not. - */ - private void resetBuffers() { - // Reset each of the buffer maps. - adcBufferMap.clear(); - truthBufferMap.clear(); - voltageBufferMap.clear(); - - // Get the set of all possible channel IDs. - Set cells = getChannelIDs(); - - // Insert a new buffer for each channel ID. 
- for(Long cellID : cells) { - voltageBufferMap.put(cellID, new DoubleRingBuffer(BUFFER_LENGTH)); - truthBufferMap.put(cellID, new ObjectRingBuffer(PIPELINE_LENGTH)); - adcBufferMap.put(cellID, new IntegerRingBuffer(PIPELINE_LENGTH, (int) Math.round(getPedestalConditions(cellID)))); - - truthBufferMap.get(cellID).stepForward(); - - flagStartNewIntegration.put(cellID, false); - } - } - - /** - * Sets whether randomized noise should be added to SLIC truth - * energy depositions when simulating subdetector hits. This is - * true by default. - * @param state - true means that noise will be - * added and false that it will not. - */ - public void setAddNoise(boolean state) { - addNoise = state; - } - - /** - * Defines the name of the subdetector geometry object. - * @param ecalName - The subdetector name. - */ - public void setGeometryName(String value) { - geometryName = value; - } - - /** - * Sets a single uniform value for the gain on all channels. This - * will override the conditions database value. If set negative, - * the conditions database values will be used instead. Gains are - * defined in units of MeV/ADC. This defaults to -1. - * @param value - The uniform gain to be employed across all - * channels in units of MeV/ADC. A negative value indicates to - * use the conditions database values. - */ - public void setFixedGain(double value) { - fixedGain = value; - } - - /** - * Sets the threshold that a pulse sample must exceed before - * pulse integration may commence. Units are in ADC and the - * default value is 12 ADC. - * @param value - The pulse integration threshold, in units of - * ADC. - */ - public void setIntegrationThreshold(int value) { - integrationThreshold = value; - } - - /** - * Sets the name of the input truth hit collection name. - * @param collection - The collection name. 
- */ - public void setInputHitCollectionName(String collection) { - truthHitCollectionName = collection; - } - - /** - * Sets the name of the input pulser data collection name. - * @param collection - The collection name. - */ - public void setInputPulserDataCollectionName(String collection) { - PulserDataCollectionName = collection; - } - - /** - * Sets the operational mode of the simulation. This affects the - * form of the readout hit output. Mode may be set to the values - * 1, 3, or 7. - * @param value - The operational mode. - */ - public void setMode(int value) { - mode = value; - } - - /** - * Defines the number of samples from after a threshold-crossing - * pulse sample that should be included in the pulse integral. - * Units are in clock-cycles (4 ns samples) and the default value - * is 20 samples. - * @param value - The number of samples. - */ - public void setNumberSamplesAfter(int value) { - numSamplesAfter = value; - } - - /** - * Defines the number of samples from before a threshold-crossing - * pulse sample that should be included in the pulse integral. - * Units are in clock-cycles (4 ns samples) and the default value - * is 5 samples. - * @param value - The number of samples. - */ - public void setNumberSamplesBefore(int value) { - numSamplesBefore = value; - } - - /** - * Sets the name of the hits produced by this driver for use in - * the trigger simulation.

- * Note that this is not the name of the collection output when a - * trigger occurs. For this value, see the method {@link - * org.hps.readout.ecal.updated.DigitizationReadoutDriver#setReadoutHitCollectionName(String) - * setReadoutHitCollectionName(String)} instead. - * @param collection - The collection name. - */ - public void setOutputHitCollectionName(String collection) { - outputHitCollectionName = collection; - } - - @Override - public void setPersistent(boolean state) { - throw new UnsupportedOperationException(); - } - - /** - * Sets the number of photoelectrons per MeV of deposited energy. - * This value is used in the simulation of subdetector hit noise - * due to photoelectron statistics. - * @param value - The number of photoelectrons per MeV. - */ - public void setPhotoelectronsPerMeV(double value) { - pePerMeV = value; - } - - /** - * Sets the pulse-shape model used to simulate pre-amplifier - * pulses. The default value is ThreePole. - * @param pulseShape - The name of the pulse shape model that is - * to be employed. Valid options are ThreePole, - * DoubleGaussian, or CRRC. - */ - public void setPulseShape(String pulseShape) { - this.pulseShape = PulseShape.valueOf(pulseShape); - } - - /** - * Sets the shaper time parameter for pulse simulation. The value - * depends on the pulse shape selected. For the default pulse - * shape ThreePole, it is equal to the RC, or half - * the peaking time (9.6 ns). - * @param value The pulse time parameter in units of nanoseconds. - */ - public void setPulseTimeParameter(double value) { - tp = value; - } - - /** - * Sets the name of the triggered hit output collection. This - * collection will hold all hits produced when a trigger occurs. - *

- * Note that this collection is different from the hits produced - * for internal usage by the readout simulation. For this value, - * see the method {@link - * org.hps.readout.ecal.updated.DigitizationReadoutDriver#setOutputHitCollectionName(String) - * setOutputHitCollectionName(String)} instead. - * @param collection - The collection name. - */ - public void setReadoutHitCollectionName(String collection) { - readoutCollectionName = collection; - } - - /** - * Sets the number of samples by which readout hit pulse-crossing - * samples should be offset. Units are in clock-cycles (intervals - * of 4 ns). - * @param value - The offset of the pulse-crossing sample in - * units of clock-cycles (4 ns intervals). - */ - public void setReadoutOffset(int value) { - readoutOffset = value; - } - - /** - * Sets time window of ADC samples in pulser data. - * Units are in clock-cycles (intervals of 4 ns). - * @param value - The time window of ADC samples in pulser data in - * units of clock-cycles (4 ns intervals). - */ - public void setPulserDataWindow(int value) { - pulserDataWindow = value; - } - - /** - * Sets sample shift between Ecal and hodoscope detectors. - * The shift is equal to (Hodo_readout_offset - Ecal_readout_offset) / 4. - * @param value - The shift of ADC samples in pulser data in - * units of clock-cycles (4 ns intervals). - */ - public void setPulserSamplesShift(int value) { - pulserSamplesShift = value; - } - - /** - * Sets the size of the readout window, in units of 4 ns samples. - * @param value - The readout window. 
- */ - public void setReadoutWindow(int value) { - readoutWindow = value; - } - - @Override - public void setReadoutWindowAfter(double value) { - throw new UnsupportedOperationException(); - } - - @Override - public void setReadoutWindowBefore(double value) { - throw new UnsupportedOperationException(); - } - - /** - * Sets the name of the collection which contains the relations - * between truth hits from SLIC and the calorimeter hit output. - * This is specifically for the trigger path hits. - * @param collection - The collection name. - */ - public void setTriggerPathTruthRelationsCollectionName(String collection) { - triggerTruthRelationsCollectionName = collection; - } - - /** - * Sets the name of the collection which contains the relations - * between truth hits from SLIC and the calorimeter hit output. - * This is specifically for the readout path hits. - * @param collection - The collection name. - */ - public void setTruthRelationsCollectionName(String collection) { - truthRelationsCollectionName = collection; - } - - /** - * Sets whether subdetector truth data for trigger path hits is - * to be produced or not. - * @param state - true indicates that the truth data - * should be created, and false that it should not. - */ - public void setWriteTriggerPathTruth(boolean state) { - writeTriggerTruth = state; - } - - /** - * Sets whether subdetector truth data for readout path hits is - * to be written to the output LCIO file or not. - * @param state - true indicates that the truth data - * should be written, and false that it should not. - */ - public void setWriteTruth(boolean state) { - writeTruth = state; - } - - /** - * Enumerable PulseShape defines the allowed types - * of pulses that may be used to emulate the subdetector response - * to incident energy. 
- * - * @author Sho Uemura - */ - public enum PulseShape { - CRRC, DoubleGaussian, ThreePole - } -} diff --git a/digi/src/main/java/org/hps/digi/nospacing/EcalDigiWithPulseNoSpacingReadoutDriver.java b/digi/src/main/java/org/hps/digi/nospacing/EcalDigiWithPulseNoSpacingReadoutDriver.java deleted file mode 100644 index b1c505f17..000000000 --- a/digi/src/main/java/org/hps/digi/nospacing/EcalDigiWithPulseNoSpacingReadoutDriver.java +++ /dev/null @@ -1,153 +0,0 @@ -package org.hps.digi.nospacing; - -import java.awt.event.ActionEvent; -import java.awt.event.ActionListener; -import java.util.Set; - -import org.hps.readout.ReadoutDriver; -import org.hps.conditions.database.DatabaseConditionsManager; -import org.hps.conditions.ecal.EcalChannelConstants; -import org.hps.conditions.ecal.EcalConditions; -import org.hps.readout.ReadoutTimestamp; -import org.hps.recon.ecal.EcalUtils; -import org.lcsim.event.RawTrackerHit; -import org.lcsim.geometry.Detector; -import org.lcsim.geometry.subdetector.HPSEcal3; -import org.hps.record.daqconfig2019.ConfigurationManager2019; -import org.hps.record.daqconfig2019.DAQConfig2019; -import org.hps.record.daqconfig2019.FADCConfigEcal2019; -import org.hps.conditions.ecal.EcalChannel.EcalChannelCollection; - -/** - * Class EcalDigiWithPulseNoSpacingReadoutDriver is an implementation of the - * {@link org.hps.digi.nospacing.CalDigiWithPulserNoSpacingReadoutDriver} for a subdetector of type {@link - * org.lcsim.geometry.subdetector.HPSEcal3 HPSEcal3}. It handles all - * of the calorimeter-specific functions needed by the superclass. - * - * @author Tongtong Cao - */ -public class EcalDigiWithPulseNoSpacingReadoutDriver extends CalDigiWithPulserNoSpacingReadoutDriver { - // The DAQ configuration manager for FADC parameters. - private FADCConfigEcal2019 config = new FADCConfigEcal2019(); - private boolean configStat = false; // Indicates if DAQ configuration is loaded - - // The number of nanoseconds in a clock-cycle (sample). 
- private static final int nsPerSample = 4; - - - /** Stores the conditions for this subdetector. */ - private EcalConditions ecalConditions = null; - - /** Stores the channel collection for this subdetector. */ - private EcalChannelCollection geoMap = new EcalChannelCollection(); - - public EcalDigiWithPulseNoSpacingReadoutDriver() { - // Set the default values for each subdetector-dependent - // parameter. - setGeometryName("Ecal"); - - setInputHitCollectionName("EcalHits"); - setOutputHitCollectionName("EcalRawHits"); - setTruthRelationsCollectionName("EcalTruthRelations"); - setTriggerPathTruthRelationsCollectionName("TriggerPathTruthRelations"); - setReadoutHitCollectionName("EcalReadoutHits"); - - setPhotoelectronsPerMeV(EcalUtils.photoelectronsPerMeV); - setPulseTimeParameter(9.6); - } - - /** - * Sets whether or not the DAQ configuration is applied into the driver - * the EvIO data stream or whether to read the configuration from data files. - * - * @param state - true indicates that the DAQ configuration is - * applied into the readout system, and false that it - * is not applied into the readout system. - */ - public void setDaqConfigurationAppliedintoReadout(boolean state) { - // If the DAQ configuration should be read, attach a listener - // to track when it updates. - if (state) { - ConfigurationManager2019.addActionListener(new ActionListener() { - @Override - public void actionPerformed(ActionEvent e) { - // Get the DAQ configuration. - DAQConfig2019 daq = ConfigurationManager2019.getInstance(); - - // Load the DAQ settings from the configuration manager. - numSamplesAfter = daq.getEcalFADCConfig().getNSA() / nsPerSample; - numSamplesBefore = daq.getEcalFADCConfig().getNSB() / nsPerSample; - readoutWindow = daq.getEcalFADCConfig().getWindowWidth() / nsPerSample; - pulserDataWindow = readoutWindow; - - // Get the FADC configuration. 
- config = daq.getEcalFADCConfig(); - configStat = true; - } - }); - } - } - - - @Override - public void detectorChanged(Detector detector) { - // Get a copy of the calorimeter conditions for the detector. - ecalConditions = DatabaseConditionsManager.getInstance().getEcalConditions(); - - // Store the calorimeter conditions table for converting between - // geometric IDs and channel objects. - geoMap = DatabaseConditionsManager.getInstance().getCachedConditions(EcalChannelCollection.class, "ecal_channels").getCachedData(); - - // Run the superclass method. - super.detectorChanged(detector); - } - - @Override - protected Set getChannelIDs() { - return getSubdetector().getNeighborMap().keySet(); - } - - @Override - protected Long getID(RawTrackerHit hit) { - return hit.getCellID(); - } - - @Override - protected double getGainConditions(long cellID) { - return findChannel(cellID).getGain().getGain(); - } - - @Override - protected double getNoiseConditions(long channelID) { - return findChannel(channelID).getCalibration().getNoise(); - } - - protected double getPedestalConditions(long cellID) { - return findChannel(cellID).getCalibration().getPedestal(); - - } - - @Override - protected double getTimeShiftConditions(long cellID) { - return findChannel(cellID).getTimeShift().getTimeShift(); - } - - @Override - protected int getTimestampFlag() { - return ReadoutTimestamp.SYSTEM_ECAL; - } - - /** - * Gets the channel parameters for a given channel ID. - * @param cellID - The long ID value that represents - * the channel. This is typically acquired from the method {@link - * org.lcsim.event.CalorimeterHit#getCellID() getCellID()} in a - * {@link org.lcsim.event.CalorimeterHit CalorimeterHit} object. - * @return Returns the channel parameters for the channel as an - * {@link org.hps.conditions.ecal.EcalChannelConstants - * EcalChannelConstants} object. 
- */ - private EcalChannelConstants findChannel(long cellID) { - return ecalConditions.getChannelConstants(ecalConditions.getChannelCollection().findGeometric(cellID)); - } -} diff --git a/digi/src/main/java/org/hps/digi/nospacing/EcalRawConverterNoSpacingReadoutDriver.java b/digi/src/main/java/org/hps/digi/nospacing/EcalRawConverterNoSpacingReadoutDriver.java deleted file mode 100755 index 670e827a7..000000000 --- a/digi/src/main/java/org/hps/digi/nospacing/EcalRawConverterNoSpacingReadoutDriver.java +++ /dev/null @@ -1,153 +0,0 @@ -package org.hps.digi.nospacing; - -import java.awt.event.ActionEvent; -import java.awt.event.ActionListener; - -import org.hps.readout.ReadoutDriver; -import org.hps.conditions.database.DatabaseConditionsManager; -import org.hps.conditions.ecal.EcalChannelConstants; -import org.hps.conditions.ecal.EcalConditions; -//import org.hps.readout.RawConverterNoSpacingReadoutDriver; -import org.hps.readout.rawconverter.AbstractMode3RawConverter; -import org.hps.readout.rawconverter.EcalReadoutMode3RawConverter; -import org.hps.record.daqconfig2019.ConfigurationManager2019; -import org.hps.record.daqconfig2019.DAQConfig2019; -import org.hps.record.daqconfig.ConfigurationManager; -import org.hps.record.daqconfig.DAQConfig; -import org.lcsim.geometry.Detector; -import org.lcsim.geometry.subdetector.HPSEcal3; - -/** - * EcalRawConverterNoSpacingReadoutDriver is an implementation of - * {@link org.hps.readout.RawConverterReadoutDriver - * RawConverterReadoutDriver} for the calorimeter subdetector. - * - * @see org.hps.readout.RawConverterReadoutDriver - */ -public class EcalRawConverterNoSpacingReadoutDriver extends RawConverterNoSpacingReadoutDriver { - /** - * The converter object responsible for processing raw hits into - * proper {@link org.lcsim.event.CalorimeterHit CalorimeterHit} - * objects. - */ - private EcalReadoutMode3RawConverter converter = new EcalReadoutMode3RawConverter(); - - /** - * Cached copy of the calorimeter conditions. 
All calorimeter - * conditions should be called from here, rather than by directly - * accessing the database manager. - */ - private EcalConditions ecalConditions = null; - - /** - * Instantiates the driver with the correct default parameters. - */ - public EcalRawConverterNoSpacingReadoutDriver() { - super("EcalRawHits", "EcalCorrectedHits"); - setSkipBadChannels(true); - } - - /** - * Sets whether or not the DAQ configuration is applied into the driver - * the EvIO data stream or whether to read the configuration from data files. - * - * @param state - true indicates that the DAQ configuration is - * applied into the readout system, and false that it - * is not applied into the readout system. - */ - public void setDaqConfiguration2016AppliedintoReadout(boolean state) { - // Track changes in the DAQ configuration. - if (state) { - ConfigurationManager.addActionListener(new ActionListener() { - @Override - public void actionPerformed(ActionEvent e) { - // Get the DAQ configuration. - DAQConfig daq = ConfigurationManager.getInstance(); - - // Load the DAQ settings from the configuration manager. - getConverter().setNumberSamplesAfter(daq.getFADCConfig().getNSA()); - getConverter().setNumberSamplesBefore(daq.getFADCConfig().getNSB()); - - // Get the FADC configuration. - getConverter().setFADCConfig2016(daq.getFADCConfig()); - } - }); - } - } - - /** - * Sets whether or not the DAQ configuration is applied into the driver - * the EvIO data stream or whether to read the configuration from data files. - * - * @param state - true indicates that the DAQ configuration is - * applied into the readout system, and false that it - * is not applied into the readout system. - */ - public void setDaqConfigurationAppliedintoReadout(boolean state) { - // Track changes in the DAQ configuration. - if (state) { - ConfigurationManager2019.addActionListener(new ActionListener() { - @Override - public void actionPerformed(ActionEvent e) { - // Get the DAQ configuration. 
- DAQConfig2019 daq = ConfigurationManager2019.getInstance(); - - // Load the DAQ settings from the configuration manager. - getConverter().setNumberSamplesAfter(daq.getEcalFADCConfig().getNSA()); - getConverter().setNumberSamplesBefore(daq.getEcalFADCConfig().getNSB()); - - // Get the FADC configuration. - getConverter().setFADCConfigEcal2019(daq.getEcalFADCConfig()); - } - }); - } - } - - /** - * Indicates whether or not data from channels flagged as "bad" - * in the conditions system should be ignored. true - * indicates that they should be ignored, and false - * that they should not. - * @param apply - true indicates that "bad" channels - * will be ignored and false that they will not. - */ - @Override - public void setSkipBadChannels(boolean state) { - super.skipBadChannels = state; - } - - @Override - protected AbstractMode3RawConverter getConverter() { - return converter; - } - - @Override - protected String getSubdetectorReadoutName(Detector detector) { - HPSEcal3 calorimeterGeometry = (HPSEcal3) detector.getSubdetector("Ecal"); - return calorimeterGeometry.getReadout().getName(); - } - - @Override - protected boolean isBadChannel(long channelID) { - return findChannel(channelID).isBadChannel(); - } - - @Override - protected void updateDetectorDependentParameters(Detector detector) { - ecalConditions = DatabaseConditionsManager.getInstance().getEcalConditions(); - } - - /** - * Gets the channel parameters for a given channel ID. - * @param cellID - The long ID value that represents - * the channel. This is typically acquired from the method {@link - * org.lcsim.event.CalorimeterHit#getCellID() getCellID()} in a - * {@link org.lcsim.event.CalorimeterHit CalorimeterHit} object. - * @return Returns the channel parameters for the channel as an - * {@link org.hps.conditions.ecal.EcalChannelConstants - * EcalChannelConstants} object. 
- */ - private EcalChannelConstants findChannel(long cellID) { - return ecalConditions.getChannelConstants(ecalConditions.getChannelCollection().findGeometric(cellID)); - } -} diff --git a/digi/src/main/java/org/hps/digi/nospacing/EmptyEventsDriver.java b/digi/src/main/java/org/hps/digi/nospacing/EmptyEventsDriver.java deleted file mode 100644 index 5f99e4ee6..000000000 --- a/digi/src/main/java/org/hps/digi/nospacing/EmptyEventsDriver.java +++ /dev/null @@ -1,147 +0,0 @@ -package org.hps.digi.nospacing; - -import java.util.ArrayList; -import java.util.HashMap; -import java.util.Map; -import java.util.List; - -import org.lcsim.event.EventHeader; -import org.lcsim.event.base.BaseLCSimEvent; -import org.lcsim.event.EventHeader.LCMetaData; -import org.lcsim.geometry.Detector; -import org.lcsim.conditions.ConditionsManager; -import org.lcsim.util.Driver; -import org.lcsim.event.EventHeader; - -import org.lcsim.event.SimCalorimeterHit; -import org.lcsim.event.SimTrackerHit; -import org.lcsim.event.MCParticle; - -/* - * This driver will create an empty lcsim event - * and call super.process() so that all of the registered - * drivers run over this empty event. - * - * - */ - - -public class EmptyEventsDriver extends Driver{ - - private int nEmptyToInsert=250; //number of events to insert between real MC events - private int emptyCount=0; //counter - //make collections for all needed by readout sim - EventHeader emptyEvent; - boolean gotFirstRealEvent=false; - //names of collections - Map baseCollectionMap=new HashMap>(); - - @Override - public void detectorChanged(Detector det) { - - // in here, make empty collections. - // since these are members and don't change - // should be able just keep adding some one - // to empty "event"....hopefully this speeds - // things up a lot. 
- - System.out.println("EmptyEventsDriver:: Setting up base map"); - - baseCollectionMap.put("EcalHits",SimCalorimeterHit.class); - baseCollectionMap.put("HodoscopeHits",SimTrackerHit.class); - baseCollectionMap.put("MCParticle",MCParticle.class); - baseCollectionMap.put("TrackerHits",SimTrackerHit.class); - baseCollectionMap.put("TrackerHitsECal",SimTrackerHit.class); - - - } - - - @Override - public void process(EventHeader event) { - // System.out.println("EmptyEventsDriver:: processing event!"); - - if(!gotFirstRealEvent){ - System.out.println("EmptyEventsDriver:: Making the empty bunch"); - //make an empty lcsim event based on this, real event - emptyEvent=makeEmptyMCEvent(event); - gotFirstRealEvent=true; - } - - // check if we should add empty or continue - - if(emptyCount mcCollections = new ArrayList(mcEvent.getMetaData()); - for (LCMetaData mcCollectionMeta : mcCollections) { - String mcCollectionName = mcCollectionMeta.getName(); - // check to see if this collection is in the base map - // if so, copy collection, clear it, and put it in new event. 
- if (baseCollectionMap.containsKey(mcCollectionName)){ - List collection =new ArrayList<> ((List) mcEvent.get(mcCollectionName)); - collection.clear(); //remove element - System.out.println("EmptyEventsDriver:: inserting collection "+mcCollectionName); - - this.putCollection(mcCollectionMeta, collection, lcsimEvent); - } - } - System.out.println("EmptyEventsDriver::returning empty event"); - return lcsimEvent; - } - - - - protected void putCollection(LCMetaData collection, List entries, EventHeader event) { - String[] readout = collection.getStringParameters().get("READOUT_NAME"); - if (readout != null) { - event.put(collection.getName(), entries, collection.getType(), collection.getFlags(), readout[0]); - } else { - event.put(collection.getName(), entries, collection.getType(), collection.getFlags()); - } - if (this.getHistogramLevel() > HLEVEL_NORMAL) - System.out.println("Putting collection " + collection.getName() + " into event."); - } - - private void clearEvent(EventHeader event){ - List evtCollections = new ArrayList(event.getMetaData()); - for (LCMetaData evtCollectionMeta : evtCollections) { - String colName=evtCollectionMeta.getName(); - List col=(List)event.get(colName); - if(col.size()>0){ - System.out.println("clearing collection "+colName+" of size = "+col.size()); - ((List)event.get(colName)).clear(); - System.out.println(".....new size = "+col.size()); - } - } - } - -} diff --git a/digi/src/main/java/org/hps/digi/nospacing/EmptyEventsReadoutDriver.java b/digi/src/main/java/org/hps/digi/nospacing/EmptyEventsReadoutDriver.java deleted file mode 100644 index 6267057dd..000000000 --- a/digi/src/main/java/org/hps/digi/nospacing/EmptyEventsReadoutDriver.java +++ /dev/null @@ -1,223 +0,0 @@ -package org.hps.digi.nospacing; - -import java.util.ArrayList; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import java.util.List; - -import org.lcsim.event.EventHeader; -import org.lcsim.event.base.BaseLCSimEvent; -import 
org.lcsim.event.EventHeader.LCMetaData; -import org.lcsim.geometry.Detector; -import org.lcsim.conditions.ConditionsManager; -import org.lcsim.util.Driver; -import org.lcsim.event.EventHeader; - -import org.lcsim.event.SimCalorimeterHit; -import org.lcsim.event.SimTrackerHit; -import org.lcsim.event.MCParticle; - -import org.hps.readout.ReadoutDataManager; -import org.hps.readout.ReadoutDriver; -import org.hps.readout.ReadoutTimestamp; - - -/* - * This driver will create an empty lcsim event - * and call super.process() so that all of the registered - * drivers run over this empty event. - * - * - */ - - -public class EmptyEventsReadoutDriver extends ReadoutDriver{ - - private int nEmptyToInsert=250; //number of events to insert between real MC events - private int emptyCount=0; //counter - //make collections for all needed by readout sim - EventHeader emptyEvent; - boolean gotFirstRealEvent=false; - //names of collections - Map baseCollectionMap=new HashMap>(); - - List baseCollectionNames=Arrays.asList("EcalHits","HodoscopeHits","MCParticle","TrackerHits","TrackerHitsECal"); - List mcCollections = null; - @Override - public void detectorChanged(Detector det) { - - // in here, make empty collections. - // since these are members and don't change - // should be able just keep adding some one - // to empty "event"....hopefully this speeds - // things up a lot. 
- - System.out.println("EmptyEventsReadoutDriver:: Setting up base map"); - - baseCollectionMap.put("EcalHits",SimCalorimeterHit.class); - baseCollectionMap.put("HodoscopeHits",SimTrackerHit.class); - baseCollectionMap.put("MCParticle",MCParticle.class); - baseCollectionMap.put("TrackerHits",SimTrackerHit.class); - baseCollectionMap.put("TrackerHitsECal",SimTrackerHit.class); - - - } - - - @Override - public void process(EventHeader event) { - System.out.println("EmptyEventsReadoutDriver:: processing event!"); - System.out.println(event.toString()); - printCollections(event); - System.out.println("empty count = "+emptyCount); - if(!gotFirstRealEvent){ - System.out.println("EmptyEventsReadoutDriver:: Making the empty bunch"); - //make an empty lcsim event based on this, real event - // emptyEvent=makeEmptyEventFromMC(event); - //just get the metadata from first event - getMCMetaData(event); - gotFirstRealEvent=true; - } - - // check if we should add empty or continue - - if(emptyCount mcCollections = new ArrayList(mcEvent.getMetaData()); - for (LCMetaData mcCollectionMeta : mcCollections) { - String mcCollectionName = mcCollectionMeta.getName(); - // check to see if this collection is in the base map - // if so, copy collection, clear it, and put it in new event. 
- if (baseCollectionMap.containsKey(mcCollectionName)){ - List collection =new ArrayList<> ((List) mcEvent.get(mcCollectionName)); - collection.clear(); //remove element - System.out.println("EmptyEventsReadoutDriver:: inserting collection "+mcCollectionName); - - this.putCollection(mcCollectionMeta, collection, lcsimEvent); - } - } - System.out.println("EmptyEventsReadoutDriver::returning empty event"); - return lcsimEvent; - } - - private EventHeader makeEmptyEvent(){ - int eventID=666666; - long time=(long)ReadoutDataManager.getCurrentTime(); - System.out.println("making an empty bunch with time = "+time); - //this was taken from evio/src/main/java/org/hps/evio/BaseEventBuilder.java - // Create a new LCSimEvent. - EventHeader lcsimEvent = - new BaseLCSimEvent( - ConditionsManager.defaultInstance().getRun(), - eventID, - ConditionsManager.defaultInstance().getDetector(), - time); - - // for (Map.Entry> thisEntry : baseCollectionMap.entrySet()) { - for (String name : baseCollectionNames) { - // String name = entry.getKey(); - // use the already obtained Metadata from the first MC event - // in order to get the flags right - System.out.println("EmptyEventsReadoutDriver:: inserting collection "+name); - for(LCMetaData mcCollectionMeta : mcCollections) { - // System.out.println("looping over collections from mcMetaData: "+mcCollectionMeta.getName()); - if (mcCollectionMeta.getName().equals(name)){ - List collection = new ArrayList<> (); - // System.out.println("EmptyEventsReadoutDriver:: inserting collection "+name); - this.putCollection(mcCollectionMeta, collection, lcsimEvent); - } - } - - } - - System.out.println("####################### this should be an empty event ###################"); - printCollections(lcsimEvent); - System.out.println("#############################################################################"); - return lcsimEvent; - - } - - protected void putCollection(LCMetaData meta, List entries, EventHeader event) { - String[] readout = 
meta.getStringParameters().get("READOUT_NAME"); - if (readout != null) { - event.put(meta.getName(), entries, meta.getType(), meta.getFlags(), readout[0]); - } else { - event.put(meta.getName(), entries, meta.getType(), meta.getFlags()); - } - if (this.getHistogramLevel() > HLEVEL_NORMAL) - System.out.println("Putting collection" + meta.getName() + " into event."); - } - - private void getMCMetaData(EventHeader mcEvent){ - mcCollections = new ArrayList(mcEvent.getMetaData()); - } - - private void clearEvent(EventHeader event){ - List evtCollections = new ArrayList(event.getMetaData()); - for (LCMetaData evtCollectionMeta : evtCollections) { - String colName=evtCollectionMeta.getName(); - List col=(List)event.get(colName); - if(col.size()>0){ - System.out.println("clearing collection "+colName+" of size = "+col.size()); - ((List)event.get(colName)).clear(); - System.out.println(".....new size = "+col.size()); - } - } - } - - private void printCollections(EventHeader event){ - List Collections = new ArrayList(event.getMetaData()); - for (LCMetaData CollectionMeta : Collections) { - String CollectionName = CollectionMeta.getName(); - // check to see if this collection is in the base map - // if so, copy collection, clear it, and put it in new event. - List collection =new ArrayList<> ((List) event.get(CollectionName)); - System.out.println("EmptyEventsReadoutDriver::printCollections:: "+CollectionName+" has "+collection.size()+" entries"); - } - } - - @Override - protected double getTimeDisplacement() { - return 0; - } - - @Override - protected double getTimeNeededForLocalOutput() { - // TODO: Probably should have some defined value - buffer seems to be filled enough from the ecal delay alone, though. 
- return 0; - } -} diff --git a/digi/src/main/java/org/hps/digi/nospacing/GTPClusterNoSpacingReadoutDriver.java b/digi/src/main/java/org/hps/digi/nospacing/GTPClusterNoSpacingReadoutDriver.java deleted file mode 100755 index 539f28a7c..000000000 --- a/digi/src/main/java/org/hps/digi/nospacing/GTPClusterNoSpacingReadoutDriver.java +++ /dev/null @@ -1,390 +0,0 @@ -package org.hps.digi.nospacing; - -import java.awt.event.ActionEvent; -import java.awt.event.ActionListener; -import java.util.ArrayList; -import java.util.Collection; -import java.util.List; - -import org.hps.conditions.database.DatabaseConditionsManager; -import org.hps.readout.ReadoutDataManager; -import org.hps.readout.ReadoutDriver; -import org.hps.readout.util.collection.LCIOCollection; -import org.hps.readout.util.collection.LCIOCollectionFactory; -import org.hps.readout.util.collection.TriggeredLCIOData; -import org.hps.recon.ecal.cluster.ClusterType; -import org.hps.record.daqconfig2019.ConfigurationManager2019; -import org.hps.record.daqconfig2019.DAQConfig2019; -import org.hps.record.daqconfig2019.VTPConfig2019; -import org.hps.record.daqconfig.ConfigurationManager; -import org.hps.record.daqconfig.DAQConfig; -import org.hps.record.daqconfig.GTPConfig; -import org.lcsim.event.CalorimeterHit; -import org.lcsim.event.Cluster; -import org.lcsim.event.EventHeader; -import org.lcsim.event.base.BaseCluster; -import org.lcsim.geometry.Detector; -import org.lcsim.geometry.subdetector.HPSEcal3; -import org.lcsim.geometry.subdetector.HPSEcal3.NeighborMap; -import org.lcsim.lcio.LCIOConstants; - -/** - * Class GTPClusterNoSpacingReadoutDriver produces GTP cluster - * objects for use in the readout trigger simulation. It takes in - * {@link org.lcsim.event.CalorimeterHit CalorimeterHit} objects as - * input and generates clusters from these using the GTP algorithm. - * This algorithm works by selected all hits in the current - * clock-cycle (4 ns period) and comparing them to adjacent hits. 
If - * a given hit is an energy maximum compared to all adjacent hits in - * both the current clock-cycle, and a number of clock-cycles before - * and after the current cycle (defined through the variable {@link - * org.hps.readout.ecal.updated.GTPClusterNoSpacingReadoutDriver#temporalWindow - * temporalWindow} and set through the method {@link - * org.hps.readout.ecal.updated.GTPClusterNoSpacingReadoutDriver#setClusterWindow(int) - * setClusterWindow(int)}), then it is a seed hit so long as it also - * exceeds a certain minimum energy (defined through the variable - * {@link - * org.hps.readout.ecal.updated.GTPClusterNoSpacingReadoutDriver#seedEnergyThreshold - * seedEnergyThreshold} and set through the method {@link - * org.hps.readout.ecal.updated.GTPClusterNoSpacingReadoutDriver#setSeedEnergyThreshold(double) - * setSeedEnergyThreshold(double)}).

- * Clusters are then output as objects of type {@link - * org.lcsim.event.Cluster Cluster} to the specified output - * collection. If the {@link - * org.hps.readout.ecal.updated.GTPClusterNoSpacingReadoutDriver#setWriteClusterCollection(boolean) - * setWriteClusterCollection(boolean)} is set to true, the clusters - * will also be persisted into the output LCIO file. - */ -public class GTPClusterNoSpacingReadoutDriver extends ReadoutDriver { - // ============================================================== - // ==== LCIO Collections ======================================== - // ============================================================== - - /** - * The name of the collection that contains the calorimeter hits - * from which clusters should be generated. - */ - private String inputCollectionName = "EcalCorrectedHits"; - /** - * The name of the collection into which generated clusters should - * be output. - */ - private String outputCollectionName = "EcalClustersGTP"; - - // ============================================================== - // ==== Driver Options ========================================== - // ============================================================== - - /** - * The time window used for cluster verification. A seed hit must - * be the highest energy hit within plus or minus this range in - * order to be considered a valid cluster. - */ - private int temporalWindow = 48; - /** - * The minimum energy needed for a hit to be considered as a seed - * hit candidate. - */ - private double seedEnergyThreshold = 0.050; - /** - * The local time for the driver. This starts at 2 ns due to a - * quirk in the timing of the {@link - * org.hps.readout.ecal.updated.EcalReadoutDriver - * EcalReadoutDriver}. - */ - private double localTime = 0.0; - /** - * The length of time by which objects produced by this driver - * are shifted due to the need to buffer data from later events. - * This is calculated automatically. 
- */ - private double localTimeDisplacement = 0; - - // ============================================================== - // ==== Driver Parameters ======================================= - // ============================================================== - - /** - * An object which can provide, given an argument cell ID, a map - * of cell IDs that are physically adjacent to the argument ID. - * This is used to determine adjacency for energy comparisons in - * the clustering algorithm. - */ - private NeighborMap neighborMap; - - private HPSEcal3 calorimeterGeometry = null; - - private boolean checkInputStatus=false; //don't check status if running on non-spaced events. - - /** - * Sets whether or not the DAQ configuration is applied into the driver - * the EvIO data stream or whether to read the configuration from data files. - * - * @param state - true indicates that the DAQ configuration is - * applied into the readout system, and false that it - * is not applied into the readout system. - */ - public void setDaqConfiguration2016AppliedintoReadout(boolean state) { - // If the DAQ configuration should be read, attach a listener - // to track when it updates. - if (state) { - ConfigurationManager.addActionListener(new ActionListener() { - @Override - public void actionPerformed(ActionEvent e) { - // Get the DAQ configuration. - DAQConfig daq = ConfigurationManager.getInstance(); - GTPConfig config = daq.getGTPConfig(); - - // Load the DAQ settings from the configuration manager. - seedEnergyThreshold = config.getSeedEnergyCutConfig().getLowerBound(); - } - }); - } - } - - /** - * Sets whether or not the DAQ configuration is applied into the driver - * the EvIO data stream or whether to read the configuration from data files. - * - * @param state - true indicates that the DAQ configuration is - * applied into the readout system, and false that it - * is not applied into the readout system. 
- */ - public void setDaqConfigurationAppliedintoReadout(boolean state) { - // If the DAQ configuration should be read, attach a listener - // to track when it updates. - if (state) { - ConfigurationManager2019.addActionListener(new ActionListener() { - @Override - public void actionPerformed(ActionEvent e) { - // Get the DAQ configuration. - DAQConfig2019 daq = ConfigurationManager2019.getInstance(); - VTPConfig2019 config = daq.getVTPConfig(); - - // Load the DAQ settings from the configuration manager. - seedEnergyThreshold = config.getEcalClusterSeedThr(); - temporalWindow = config.getEcalClusterHitDT(); - } - }); - } - } - - @Override - public void detectorChanged(Detector etector) { - // Get the calorimeter data object. - //HPSEcal3 ecal = (HPSEcal3) DatabaseConditionsManager.getInstance().getDetectorObject().getSubdetector("Ecal"); - calorimeterGeometry = (HPSEcal3) DatabaseConditionsManager.getInstance().getDetectorObject().getSubdetector("Ecal"); - if(calorimeterGeometry == null) { - throw new IllegalStateException("Error: Calorimeter geometry data object not defined."); - } - - // Get the calorimeter hit neighbor map. - neighborMap = calorimeterGeometry.getNeighborMap(); - if(neighborMap == null) { - throw new IllegalStateException("Error: Calorimeter hit neighbor map is not defined."); - } - } - - @Override - public void process(EventHeader event) { - // Check the data management driver to determine whether the - // input collection is available or not. - if(checkInputStatus&&!ReadoutDataManager.checkCollectionStatus(inputCollectionName, ReadoutDataManager.getCurrentTime() +192.0)) { - // System.out.println("Skipping GTP Readout with because collection doesn't exist at "+(ReadoutDataManager.getCurrentTime() + 192.0)); - return; - } - - // Get the hits that occur during the present clock-cycle, as - // well as the hits that occur in the verification window - // both before and after the current clock-cycle. - // TODO: Simplify this? 
- Collection allHits = ReadoutDataManager.getData(ReadoutDataManager.getCurrentTime(), ReadoutDataManager.getCurrentTime() + 192.0, inputCollectionName, CalorimeterHit.class); - // Collection foreHits = ReadoutDataManager.getData(ReadoutDataManager.getCurrentTime() - temporalWindow, ReadoutDataManager.getCurrentTime(), inputCollectionName, CalorimeterHit.class); - //Collection postHits = ReadoutDataManager.getData(ReadoutDataManager.getCurrentTime() + 4.0, ReadoutDataManager.getCurrentTime() + temporalWindow + 4.0, inputCollectionName, CalorimeterHit.class); - - - // List allHits = new ArrayList(seedCandidates.size() + foreHits.size() + postHits.size()); - - //allHits.addAll(foreHits); - //allHits.addAll(seedCandidates); - //allHits.addAll(postHits); - // System.out.println(this.getClass().getName()+":: "+inputCollectionName+":: local time = "+ReadoutDataManager.getCurrentTime()+" number of seeds = "+allHits.size()); - - // Store newly created clusters. - List gtpClusters = new ArrayList(); - - // Iterate over all seed hit candidates. - seedLoop: - for(CalorimeterHit seedCandidate : allHits) { - // A seed candidate must meet a minimum energy cut to be - // considered for clustering. - if(seedCandidate.getRawEnergy() < seedEnergyThreshold) { - continue seedLoop; - } - - // Collect other hits that are adjacent to the seed hit - // and may be a part of the cluster. - List clusterHits = new ArrayList(); - - // Iterate over all other hits in the clustering window - // and check that the seed conditions are met for the - // seed candidate. Note that all hits are properly within - // the clustering time window by definition, so the time - // condition is not checked explicitly. - hitLoop: - for(CalorimeterHit hit : allHits) { - // If the hit is not adjacent to the seed hit, it can - // be ignored. 
- if(!neighborMap.get(seedCandidate.getCellID()).contains(hit.getCellID())) { - continue hitLoop; - } - - // A seed hit must have the highest energy in its - // spatiotemporal window. If it is not, this is not a - // valid seed hit. - if(seedCandidate.getRawEnergy() < hit.getRawEnergy()) { - continue seedLoop; - } - - // Add the hit to the list of cluster hits. - clusterHits.add(hit); - } - - // If no adjacent hit was found that invalidates the seed - // condition, then the seed candidate is valid and a - // cluster should be formed. - gtpClusters.add(createBasicCluster(seedCandidate, clusterHits)); - } - - // Pass the clusters to the data management driver. - // System.out.println(this.getClass().getName()+":: number of GTP Clusters "+gtpClusters.size()); - ReadoutDataManager.addData(outputCollectionName, gtpClusters, Cluster.class); - } - - @Override - public void startOfData() { - // Define the output LCSim collection parameters. - LCIOCollectionFactory.setCollectionName(outputCollectionName); - LCIOCollectionFactory.setProductionDriver(this); - LCIOCollectionFactory.setFlags(1 << LCIOConstants.CLBIT_HITS); - LCIOCollection clusterCollectionParams = LCIOCollectionFactory.produceLCIOCollection(Cluster.class); - - // Instantiate the GTP cluster collection with the readout - // data manager. - localTimeDisplacement = temporalWindow + 4.0; - addDependency(inputCollectionName); - ReadoutDataManager.registerCollection(clusterCollectionParams, false); - } - - @Override - protected Collection> getOnTriggerData(double triggerTime) { - // If clusters are not to be output, return null. - if(!isPersistent()) { return null; } - - // Create a list to store the on-trigger collections. There - // are two collections outputs for this driver - the clusters - // and the cluster hits. Unlike other drivers, the clusterer - // must handle its own output because the manager does not - // know that it must also specifically output the hits from - // each cluster as well. 
- List> collectionsList = new ArrayList>(2); - - // Define the LCIO collection settings for the clusters. - LCIOCollectionFactory.setCollectionName(outputCollectionName); - LCIOCollectionFactory.setProductionDriver(this); - LCIOCollectionFactory.setFlags(1 << LCIOConstants.CLBIT_HITS); - LCIOCollection clusterCollectionParams = LCIOCollectionFactory.produceLCIOCollection(Cluster.class); - - // Define the LCIO collection settings for the cluster hits. - int hitFlags = 0; - hitFlags += 1 << LCIOConstants.RCHBIT_TIME; - hitFlags += 1 << LCIOConstants.RCHBIT_LONG; - LCIOCollectionFactory.setCollectionName("EcalClustersGTPSimHits"); - LCIOCollectionFactory.setProductionDriver(this); - LCIOCollectionFactory.setFlags(hitFlags); - LCIOCollectionFactory.setReadoutName(calorimeterGeometry.getReadout().getName()); - LCIOCollection clusterHitsCollectionParams = LCIOCollectionFactory.produceLCIOCollection(CalorimeterHit.class); - - // Get the output time range for clusters. This is either the - // user defined output range, or the default readout window - // that is defined by the readout data manager. - double startTime; - if(Double.isNaN(getReadoutWindowBefore())) { startTime = triggerTime - ReadoutDataManager.getTriggerOffset(); } - else { startTime = triggerTime - getReadoutWindowBefore(); } - - double endTime; - if(Double.isNaN(getReadoutWindowAfter())) { endTime = startTime + ReadoutDataManager.getReadoutWindow(); } - else { endTime = triggerTime + getReadoutWindowAfter(); } - - // Get the cluster data and populate a list of cluster hits. - Collection clusters = ReadoutDataManager.getData(startTime, endTime, outputCollectionName, Cluster.class); - List clusterHits = new ArrayList(); - for(Cluster cluster : clusters) { - clusterHits.addAll(cluster.getCalorimeterHits()); - } - - // Create the LCIO on-trigger data lists. 
- TriggeredLCIOData clusterHitData = new TriggeredLCIOData(clusterHitsCollectionParams); - clusterHitData.getData().addAll(clusterHits); - collectionsList.add(clusterHitData); - - TriggeredLCIOData clusterData = new TriggeredLCIOData(clusterCollectionParams); - clusterData.getData().addAll(clusters); - collectionsList.add(clusterData); - - // Return the on-trigger data. - return collectionsList; - } - - @Override - protected double getTimeDisplacement() { - return localTimeDisplacement; - } - - @Override - protected double getTimeNeededForLocalOutput() { - return 0; - } - - /** - * Creates a new cluster object from a seed hit and list of hits. - * @param seedHit - The seed hit of the new cluster. - * @param hits - The hits for the new cluster. - * @return Returns a {@link org.lcsim.event.Cluster Cluster} - * object with the specified properties. - */ - private static final Cluster createBasicCluster(CalorimeterHit seedHit, List hits) { - BaseCluster cluster = new BaseCluster(); - cluster.setType(ClusterType.GTP.getType()); - cluster.addHit(seedHit); - cluster.setPosition(seedHit.getDetectorElement().getGeometry().getPosition().v()); - cluster.setNeedsPropertyCalculation(false); - cluster.addHits(hits); - return cluster; - } - - /** - * Sets the size of the hit verification temporal window. Note - * that this defines the size of the window in one direction, so - * the full time window will be (2 * clusterWindow)+ - * 1 clock-cycles in length. (i.e., it will be a length of - * clusterWindow before the seed hit, a length of - * clusterWindow after the seed hit, plus the cycle - * that includes the seed hit.) Time length is in clock-cycles. - * @param value - The number of clock-cycles around the hit in - * one direction. - */ - public void setClusterWindow(int value) { - temporalWindow = value * 4; - } - - /** - * Sets the minimum seed energy needed for a hit to be considered - * for forming a cluster. 
This is the seed energy lower bound - * trigger cut and is in units of GeV. - * @param value - The minimum cluster seed energy in GeV. - */ - public void setSeedEnergyThreshold(double value) { - seedEnergyThreshold = value; - } -} diff --git a/digi/src/main/java/org/hps/digi/nospacing/HodoscopeDigiWithPulseNoSpacingReadoutDriver.java b/digi/src/main/java/org/hps/digi/nospacing/HodoscopeDigiWithPulseNoSpacingReadoutDriver.java deleted file mode 100644 index 57103f85e..000000000 --- a/digi/src/main/java/org/hps/digi/nospacing/HodoscopeDigiWithPulseNoSpacingReadoutDriver.java +++ /dev/null @@ -1,224 +0,0 @@ -package org.hps.digi.nospacing; - -import java.awt.event.ActionEvent; -import java.awt.event.ActionListener; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Map; -import java.util.Set; - -import org.hps.readout.ReadoutDriver; -import org.hps.conditions.database.DatabaseConditionsManager; -import org.hps.conditions.hodoscope.HodoscopeChannel; -import org.hps.conditions.hodoscope.HodoscopeCalibration; -import org.hps.conditions.hodoscope.HodoscopeCalibration.HodoscopeCalibrationCollection; -import org.hps.conditions.hodoscope.HodoscopeChannel.HodoscopeChannelCollection; -import org.hps.conditions.hodoscope.HodoscopeGain; -import org.hps.conditions.hodoscope.HodoscopeGain.HodoscopeGainCollection; -import org.hps.conditions.hodoscope.HodoscopeTimeShift; -import org.hps.conditions.hodoscope.HodoscopeTimeShift.HodoscopeTimeShiftCollection; -import org.hps.readout.ReadoutTimestamp; -import org.lcsim.event.RawTrackerHit; -import org.lcsim.geometry.Detector; -import org.lcsim.geometry.subdetector.Hodoscope_v1; - -import org.hps.conditions.hodoscope.HodoscopeConditions; - -import org.hps.record.daqconfig2019.ConfigurationManager2019; -import org.hps.record.daqconfig2019.DAQConfig2019; -import org.hps.record.daqconfig2019.FADCConfigHodo2019; - -/** - * Class HodoscopeDigitizationWithPulserDataMergingReadoutDriver is an - * implementation of the {@link 
- * org.hps.digi.CalDigiWithPulserNoSpacingReadoutDriver} for a subdetector of type {@link - * org.lcsim.geometry.subdetector.Hodoscope_v1 Hodoscope_v1}. It - * handles all of the hodoscope-specific functions needed by the - * superclass. - * - * @author Tongtong Cao - */ -public class HodoscopeDigiWithPulseNoSpacingReadoutDriver extends CalDigiWithPulserNoSpacingReadoutDriver { - // The DAQ configuration manager for FADC parameters. - private FADCConfigHodo2019 config = new FADCConfigHodo2019(); - private boolean configStat = false; // Indicates if DAQ configuration is loaded - - // The number of nanoseconds in a clock-cycle (sample). - private static final int nsPerSample = 4; - - /** Stores the set of all channel IDs for the hodoscope. */ - private Set channelIDSet = new HashSet(); - /** Maps hodoscope channels to the gain for that channel. */ - private Map channelToGainsMap = new HashMap(); - /** Maps hodoscope channels to the time shifts for that channel. */ - private Map channelToTimeShiftsMap = new HashMap(); - /** Maps hodoscope channels to the noise sigma and pedestals for that channel. */ - private Map channelToCalibrationsMap = new HashMap(); - /** Factor for gain conversion from self-define-unit/ADC to MeV/ADC. */ - private double factorGainConversion = 0.000833333; - /** Gain scaling factor for raw energy (self-defined unit) of FADC hits. - * In DAQ configuration, gains are scaled by the gain scaling factor for two-hole tiles. - * Such gains from DAQ configuration should be divided by the factor. - */ - - private HodoscopeConditions hodoConditions = null; - - public HodoscopeDigiWithPulseNoSpacingReadoutDriver() { - // Set the default values for each subdetector-dependent - // parameter. 
- setGeometryName("Hodoscope"); - - setInputHitCollectionName("HodoscopeHits"); - setOutputHitCollectionName("HodoscopeRawHits"); - setTruthRelationsCollectionName("HodoscopeTruthRelations"); - setTriggerPathTruthRelationsCollectionName("HodoscopeTriggerPathTruthRelations"); - setReadoutHitCollectionName("HodoscopeReadoutHits"); - - setNumberSamplesAfter(10); - setNumberSamplesBefore(6); - setPulseTimeParameter(4.0); - setPhotoelectronsPerMeV(10.0); - - setIntegrationThreshold(12); - } - - /** - * Sets whether or not the DAQ configuration is applied into the driver - * the EvIO data stream or whether to read the configuration from data files. - * - * @param state - true indicates that the DAQ configuration is - * applied into the readout system, and false that it - * is not applied into the readout system. - */ - public void setDaqConfigurationAppliedintoReadout(boolean state) { - // If the DAQ configuration should be read, attach a listener - // to track when it updates. - if (state) { - ConfigurationManager2019.addActionListener(new ActionListener() { - @Override - public void actionPerformed(ActionEvent e) { - // Get the DAQ configuration. - DAQConfig2019 daq = ConfigurationManager2019.getInstance(); - - // Load the DAQ settings from the configuration manager. - numSamplesAfter = daq.getHodoFADCConfig().getNSA() / nsPerSample; - numSamplesBefore = daq.getHodoFADCConfig().getNSB() / nsPerSample; - readoutWindow = daq.getHodoFADCConfig().getWindowWidth() / nsPerSample; - pulserDataWindow = readoutWindow; - - // Get the FADC configuration. - config = daq.getHodoFADCConfig(); - configStat = true; - } - }); - } - - } - - - @Override - public void detectorChanged(Detector detector) { - // Get a copy of the calorimeter conditions for the detector. - hodoConditions = DatabaseConditionsManager.getInstance().getHodoConditions(); - - // Populate the channel ID collections. - populateChannelCollections(); - - // Run the superclass method. 
- super.detectorChanged(detector); - } - - @Override - protected Set getChannelIDs() { - return channelIDSet; - } - - @Override - protected Long getID(RawTrackerHit hit) { - return Long.valueOf(hodoConditions.getChannels().findGeometric(hit.getCellID()).getChannelId().intValue()); - } - - @Override - protected double getGainConditions(long channelID) { - if(channelToGainsMap.containsKey(Long.valueOf(channelID))) { - return channelToGainsMap.get(Long.valueOf(channelID)).getGain() * factorGainConversion; - } else { - throw new IllegalArgumentException("No gain conditions exist for hodoscope channel ID \"" + channelID + "\"."); - } - } - - @Override - protected double getNoiseConditions(long channelID) { - if(channelToCalibrationsMap.containsKey(Long.valueOf(channelID))) { - return channelToCalibrationsMap.get(Long.valueOf(channelID)).getNoise(); - } else { - throw new IllegalArgumentException("No noise conditions exist for hodoscope channel ID \"" + channelID + "\"."); - } - } - - @Override - protected double getPedestalConditions(long channelID) { - if (channelToCalibrationsMap.containsKey(Long.valueOf(channelID))) { - return channelToCalibrationsMap.get(Long.valueOf(channelID)).getPedestal(); - } else { - throw new IllegalArgumentException( - "No pedestal conditions exist for hodoscope channel ID \"" + channelID + "\"."); - } - } - - @Override - protected double getTimeShiftConditions(long channelID) { - if(channelToTimeShiftsMap.containsKey(Long.valueOf(channelID))) { - return channelToTimeShiftsMap.get(Long.valueOf(channelID)).getTimeShift(); - } else { - throw new IllegalArgumentException("No time shift conditions exist for hodoscope channel ID \"" + channelID + "\"."); - } - } - - @Override - protected int getTimestampFlag() { - return ReadoutTimestamp.SYSTEM_HODOSCOPE; - } - - /** - * Populates the channel ID set and maps all existing channels to - * their respective conditions. 
- */ - private void populateChannelCollections() { - // Load the conditions database and get the hodoscope channel - // collection data. - final DatabaseConditionsManager conditions = DatabaseConditionsManager.getInstance(); - final HodoscopeGainCollection gains = conditions.getCachedConditions(HodoscopeGainCollection.class, "hodo_gains").getCachedData(); - final HodoscopeChannelCollection channels = conditions.getCachedConditions(HodoscopeChannelCollection.class, "hodo_channels").getCachedData(); - final HodoscopeTimeShiftCollection timeShifts = conditions.getCachedConditions(HodoscopeTimeShiftCollection.class, "hodo_time_shifts").getCachedData(); - final HodoscopeCalibrationCollection calibrations = conditions.getCachedConditions(HodoscopeCalibrationCollection.class, "hodo_calibrations").getCachedData(); - - // Map the gains to channel IDs. - for(HodoscopeGain gain : gains) { - channelToGainsMap.put(Long.valueOf(gain.getChannelId().intValue()), gain); - } - - // Map the pedestals and noise to channel IDs. - for(HodoscopeCalibration calibration : calibrations) { - channelToCalibrationsMap.put(Long.valueOf(calibration.getChannelId().intValue()), calibration); - } - - // Map time shifts to channel IDs. - for(HodoscopeTimeShift timeShift : timeShifts) { - channelToTimeShiftsMap.put(Long.valueOf(timeShift.getChannelId().intValue()), timeShift); - } - - // Store the set of all channel IDs. - for(HodoscopeChannel channel : channels) { - channelIDSet.add(Long.valueOf(channel.getChannelId().intValue())); - } - } - - /** - * Sets factor for gain conversion from self-defined unit/ADC to MeV/ADC - * @param factor - factor for gain conversion from self-defined-unit/ADC to MeV/ADC. 
- */ - public void setFactorGainConversion(double factor) { - factorGainConversion = factor; - } -} diff --git a/digi/src/main/java/org/hps/digi/nospacing/HodoscopePatternNoSpacingDriver.java b/digi/src/main/java/org/hps/digi/nospacing/HodoscopePatternNoSpacingDriver.java deleted file mode 100644 index 25ae76ed7..000000000 --- a/digi/src/main/java/org/hps/digi/nospacing/HodoscopePatternNoSpacingDriver.java +++ /dev/null @@ -1,436 +0,0 @@ -package org.hps.digi.nospacing; - -import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; - -import java.util.Map; - -import java.awt.Point; -import java.awt.event.ActionEvent; -import java.awt.event.ActionListener; -import java.util.ArrayList; -import java.util.List; - -import org.hps.conditions.database.DatabaseConditionsManager; -import org.hps.conditions.hodoscope.HodoscopeChannel; -import org.hps.conditions.hodoscope.HodoscopeChannel.HodoscopeChannelCollection; -import org.hps.readout.ReadoutDataManager; -import org.hps.readout.ReadoutDriver; -import org.hps.readout.util.collection.LCIOCollection; -import org.hps.readout.util.collection.LCIOCollectionFactory; -import org.hps.record.daqconfig2019.ConfigurationManager2019; -import org.hps.record.daqconfig2019.DAQConfig2019; -import org.hps.record.daqconfig2019.VTPConfig2019; -import org.hps.readout.util.HodoscopePattern; -import org.lcsim.event.CalorimeterHit; -import org.lcsim.event.EventHeader; -import org.lcsim.geometry.Detector; - -/** - * Class HodoscopePatternReadoutDriver produces hodoscope pattern - * objects for Ecal-hodo matching in the trigger simulation. Persistency of Hodo - * FADC hits is persistentTime. On the other hand, hodo FADC hits - * is earlier to enter the trigger system than Ecal by - * timeEarlierThanEcal Therefore, for each clock-cycle, FADC hits - * in [localTime - (persistentTime - timeEarlierThanEcal), localTime + - * timeEarlierThanEcal + 4] are taken into account to generate hodoscope - * patterns for all layers. 
- */ -public class HodoscopePatternNoSpacingDriver extends ReadoutDriver { - /** Maps hodoscope channel IDs to channels. */ - private Map channelMap = new HashMap(); - - /** - * The name of the collection that contains the hodo FADC hits, which raw energy - * is self-defined. Through the hodo FADC hits, hodoscope pattern is generated. - */ - private String inputCollectionName = "HodoscopeCorrectedHits"; - /** - * The name of the collection into which generated hodoscope patterns for all - * four layers should be output. - */ - private String outputCollectionName = "HodoscopePatterns"; - - /** - * The local time for the driver. - */ - private double localTime = 0.0; - - /** - * Hodoscope FADC hit cut - */ - private double fADCHitThreshold = 1.0; - - /** - * Hodoscope tilt/cluster hit cut - */ - private double hodoHitThreshold = 200.0; - - /** - * Gain scaling factor for hits at two-hole tiles. - * Gains from database need to be scaled by the factor - * Gains in the DAQ configuration have been scaled by the factor. - */ - private double gainFactor = 1.25 / 2; - - /** - * Persistent time for hodoscope FADC hit in unit of ns - */ - private double persistentTime = 60.0; - - /** - * Time for hodoscope FADC hits earlier to enter the trigger system than Ecal - * with unit of ns - */ - private double timeEarlierThanEcal = 0.0; - - /** - * The length of time by which objects produced by this driver are shifted due - * to the need to buffer data from later events. This is calculated - * automatically. 
Hodo FADC hits enter the trigger system earlier than Ecal hits - * by timeEarlierThanEcal - */ - private double localTimeDisplacement = 0.0; - - /** - * According to setup in database, index for hodoscope layers are expressed as - * (layer+1)*y - */ - public static final int TopLayer1 = 1; - public static final int TopLayer2 = 2; - public static final int BotLayer1 = -1; - public static final int BotLayer2 = -2; - - /** - * List for 4 layers; - */ - private List layerList = new ArrayList<>(4); - - /** - * List for 8 (x, hole) points of each layer - */ - private List xHolePointList = new ArrayList<>(8); - - private boolean daqConfigurationAppliedintoReadout = false; - - /** - * Sets whether or not the DAQ configuration is applied into the driver - * the EvIO data stream or whether to read the configuration from data files. - * - * @param state - true indicates that the DAQ configuration is - * applied into the readout system, and false that it - * is not applied into the readout system. - */ - public void setDaqConfigurationAppliedintoReadout(boolean state) { - daqConfigurationAppliedintoReadout = state; - - // If the DAQ configuration should be read, attach a listener - // to track when it updates. - if (state) { - ConfigurationManager2019.addActionListener(new ActionListener() { - @Override - public void actionPerformed(ActionEvent e) { - // Get the DAQ configuration. - DAQConfig2019 daq = ConfigurationManager2019.getInstance(); - VTPConfig2019 config = daq.getVTPConfig(); - - // Load the DAQ settings from the configuration manager. - fADCHitThreshold = config.getHodoFADCHitThr(); - hodoHitThreshold = config.getHodoThr(); - persistentTime = config.getHodoDT(); - } - }); - } - } - - @Override - public void process(EventHeader event) { - - // Check the data management driver to determine whether the - // input collection is available or not. 
- // if (!ReadoutDataManager.checkCollectionStatus(inputCollectionName, localTime + localTimeDisplacement)) { - // return; - // } - if (!ReadoutDataManager.checkCollectionStatus(inputCollectionName, ReadoutDataManager.getCurrentTime())) { - // System.out.println(this.getClass().getName()+":: "+inputCollectionName+" is not ready for this!"); - return; - } - - // Hodoscope FADC hits enter the trigger system earlier than Ecal by the time - // timeEarlierThanEcal . - // On the other hand, hodoscope FADC hits persist with a range of - // persistentTime. - // To build current hodo patterns, FADC hits between localTime - (persistentTime - // - timeEarlierThanEcal) and localTime + timeEarlierThanEcal + 4 are used. - // Collection fadcHits = ReadoutDataManager.getData( - // localTime - (persistentTime - timeEarlierThanEcal), localTime + timeEarlierThanEcal + 4.0, - // inputCollectionName, CalorimeterHit.class); - - Collection fadcHits = ReadoutDataManager.getData( - ReadoutDataManager.getCurrentTime() - (persistentTime - timeEarlierThanEcal), ReadoutDataManager.getCurrentTime() + timeEarlierThanEcal + 4.0, - inputCollectionName, CalorimeterHit.class); - - // System.out.println(this.getClass().getName()+":: found "+fadcHits.size()+" fadcHits"); - // Increment the local time. 
- - // All hits over fadcHitThreshold are saved for each hole of each - // layer - Map>> energyListMapForLayerMap = new HashMap>>(); - - for (int layer : layerList) { - Map> energyListMap = new HashMap>(); - for (Point point : xHolePointList) { - energyListMap.put(point, new ArrayList()); - } - energyListMapForLayerMap.put(layer, energyListMap); - } - - for (CalorimeterHit hit : fadcHits) { - double energy = hit.getRawEnergy(); - if (energy > fADCHitThreshold) { - Long cellID = hit.getCellID(); - int layer = channelMap.get(cellID).getLayer(); - int y = channelMap.get(cellID).getIY(); - int x = channelMap.get(cellID).getIX(); - int hole = channelMap.get(cellID).getHole(); - - Point point = new Point(x, hole); - // Energy of hits is scaled except hits at tiles 0 and 4 - if(x == 0 || x == 4) energyListMapForLayerMap.get((layer + 1) * y).get(point).add(energy); - else { - // Gains in the DAQ configuration has been scaled by the factor. - if(daqConfigurationAppliedintoReadout) energyListMapForLayerMap.get((layer + 1) * y).get(point).add(energy); - else energyListMapForLayerMap.get((layer + 1) * y).get(point).add(energy * gainFactor); - } - } - } - - //Get maximum of energy in lists for each hole of each layer - Map> maxEnergyMapForLayerMap = new HashMap>(); - for (int layer : layerList) { - Map maxEnergyMap = new HashMap<>(); - for (Point point : xHolePointList) { - if(energyListMapForLayerMap.get(layer).get(point).size() != 0) - maxEnergyMap.put(point, Collections.max(energyListMapForLayerMap.get(layer).get(point))); - else - maxEnergyMap.put(point, 0.); - - } - maxEnergyMapForLayerMap.put(layer, maxEnergyMap); - } - - //Hodoscope patterns for all layers - //Order of list: TopLayer1, TopLayer2, BotLayer1, BotLayer2 - List hodoPatterns = new ArrayList<>(4); - - // Flag to determine if a pattern list at the current clock-cycle is added into data manager - boolean flag = false; - - for (int i = 0; i < 4; i++) { - HodoscopePattern pattern = new HodoscopePattern(); - - Map 
maxEnergyMap = maxEnergyMapForLayerMap.get(layerList.get(i)); - - if (maxEnergyMap.get(xHolePointList.get(0)) > hodoHitThreshold) { - pattern.setHitStatus(HodoscopePattern.HODO_LX_1, true); - flag = true; - } - if (maxEnergyMap.get(xHolePointList.get(1)) + maxEnergyMap.get(xHolePointList.get(2)) > hodoHitThreshold) { - pattern.setHitStatus(HodoscopePattern.HODO_LX_2, true); - flag = true; - } - if (maxEnergyMap.get(xHolePointList.get(3)) + maxEnergyMap.get(xHolePointList.get(4)) > hodoHitThreshold) { - pattern.setHitStatus(HodoscopePattern.HODO_LX_3, true); - flag = true; - } - if (maxEnergyMap.get(xHolePointList.get(5)) + maxEnergyMap.get(xHolePointList.get(6)) > hodoHitThreshold) { - pattern.setHitStatus(HodoscopePattern.HODO_LX_4, true); - flag = true; - } - if (maxEnergyMap.get(xHolePointList.get(7)) > hodoHitThreshold) { - pattern.setHitStatus(HodoscopePattern.HODO_LX_5, true); - flag = true; - } - if (maxEnergyMap.get(xHolePointList.get(0)) + maxEnergyMap.get(xHolePointList.get(1)) - + maxEnergyMap.get(xHolePointList.get(2)) > hodoHitThreshold - && maxEnergyMap.get(xHolePointList.get(0)) != 0 - && (maxEnergyMap.get(xHolePointList.get(1)) != 0 || maxEnergyMap.get(xHolePointList.get(2)) != 0)) { - pattern.setHitStatus(HodoscopePattern.HODO_LX_CL_12, true); - flag = true; - } - if (maxEnergyMap.get(xHolePointList.get(1)) + maxEnergyMap.get(xHolePointList.get(2)) - + maxEnergyMap.get(xHolePointList.get(3)) - + maxEnergyMap.get(xHolePointList.get(4)) > hodoHitThreshold - && (maxEnergyMap.get(xHolePointList.get(1)) != 0 || maxEnergyMap.get(xHolePointList.get(2)) != 0) - && (maxEnergyMap.get(xHolePointList.get(3)) != 0 || maxEnergyMap.get(xHolePointList.get(4)) != 0)) { - pattern.setHitStatus(HodoscopePattern.HODO_LX_CL_23, true); - flag = true; - } - if (maxEnergyMap.get(xHolePointList.get(3)) + maxEnergyMap.get(xHolePointList.get(4)) - + maxEnergyMap.get(xHolePointList.get(5)) - + maxEnergyMap.get(xHolePointList.get(6)) > hodoHitThreshold - && 
(maxEnergyMap.get(xHolePointList.get(3)) != 0 || maxEnergyMap.get(xHolePointList.get(4)) != 0) - && (maxEnergyMap.get(xHolePointList.get(5)) != 0 || maxEnergyMap.get(xHolePointList.get(6)) != 0)) { - pattern.setHitStatus(HodoscopePattern.HODO_LX_CL_34, true); - flag = true; - } - if (maxEnergyMap.get(xHolePointList.get(5)) + maxEnergyMap.get(xHolePointList.get(6)) - + maxEnergyMap.get(xHolePointList.get(7)) > hodoHitThreshold - && (maxEnergyMap.get(xHolePointList.get(5)) != 0 || maxEnergyMap.get(xHolePointList.get(6)) != 0) - && maxEnergyMap.get(xHolePointList.get(7)) != 0) { - pattern.setHitStatus(HodoscopePattern.HODO_LX_CL_45, true); - flag = true; - } - - hodoPatterns.add(pattern); - } - // System.out.println(this.getClass().getName()+":: found "+hodoPatterns.size()+" patterns"); - - // At leaset there is a hodo tilt/cluster hit in any layer, then the pattern list is added into data manager - if(flag == true){ - // System.out.println(this.getClass().getName()+":: at least one of the patterns was good!!!"); - ReadoutDataManager.addData(outputCollectionName, hodoPatterns, HodoscopePattern.class); - } - } - - @Override - public void startOfData() { - // Define the output LCSim collection parameters. - LCIOCollectionFactory.setCollectionName(outputCollectionName); - LCIOCollectionFactory.setProductionDriver(this); - LCIOCollection patternCollectionParams = LCIOCollectionFactory - .produceLCIOCollection(HodoscopePattern.class); - - // Instantiate the GTP cluster collection with the readout - // data manager. 
- localTimeDisplacement = timeEarlierThanEcal + 4.0; - addDependency(inputCollectionName); - ReadoutDataManager.registerCollection(patternCollectionParams, false); - - initLists(); - } - - /** - * Initiate (layer, y) list and (x, hole) list - */ - private void initLists() { - // Add elements for layer list - layerList.add(TopLayer1); - layerList.add(TopLayer2); - layerList.add(BotLayer1); - layerList.add(BotLayer2); - - // Add elements for (x, hole) point list - xHolePointList.add(new Point(0, 0)); - xHolePointList.add(new Point(1, -1)); - xHolePointList.add(new Point(1, 1)); - xHolePointList.add(new Point(2, -1)); - xHolePointList.add(new Point(2, 1)); - xHolePointList.add(new Point(3, -1)); - xHolePointList.add(new Point(3, 1)); - xHolePointList.add(new Point(4, 0)); - } - - @Override - public void detectorChanged(Detector detector) { - // Populate the channel ID collections. - populateChannelCollections(); - } - - /** - * Populates the channel ID set and maps all existing channels to their - * respective conditions. - */ - private void populateChannelCollections() { - // Load the conditions database and get the hodoscope channel - // collection data. - final DatabaseConditionsManager conditions = DatabaseConditionsManager.getInstance(); - final HodoscopeChannelCollection channels = conditions - .getCachedConditions(HodoscopeChannelCollection.class, "hodo_channels").getCachedData(); - - // Map channels to channel IDs - for (HodoscopeChannel channel : channels) { - channelMap.put(Long.valueOf(channel.getChannelId().intValue()), channel); - } - } - - @Override - protected double getTimeDisplacement() { - return localTimeDisplacement; - } - - @Override - protected double getTimeNeededForLocalOutput() { - return 0; - } - - /** - * Sets the name of the input collection containing the objects of type - * {@link org.lcsim.event.CalorimeterHit CalorimeterHit} that are output by the - * digitization driver. - * - * @param collection - The name of the input hit collection. 
- */ - public void setInputCollectionName(String collection) { - inputCollectionName = collection; - } - - /** - * Sets the name of the output collection containing the objects of type - * {@link org.hps.readout.hodoscope.HodoscopePattern HodoscopePattern} that are - * output by this driver. - * - * @param collection - The name of the output hodoscope pattern collection. - */ - public void setOutputCollectionName(String collection) { - outputCollectionName = collection; - } - - /** - * Sets hodoscope FADC hit threshold - * - * @param FADC hit threshold - */ - public void setFADCHitThreshold(double fADCHitThreshold) { - this.fADCHitThreshold = fADCHitThreshold; - } - - /** - * Sets hodoscope tilt/cluster hit threshold - * - * @param hodoscope tilt/cluster hit threshold - */ - public void setHodoHitThreshold(double hodoHitThreshold) { - this.hodoHitThreshold = hodoHitThreshold; - } - - /** - * Set persistency for hodoscope FADC hit in unit of ns - * - * @param persistency for hodoscope FADC hit in unit of ns - */ - public void setPersistentTime(double persistentTime) { - this.persistentTime = persistentTime; - } - - /** - * Set time for hodoscope FADC hits earlier to enter the trigger system than - * Ecal with unit of ns - * - * @param time for hodoscope FADC hits earlier to enter the trigger system than - * Ecal with unit of ns - */ - public void setTimeEarlierThanEcal(double timeEarlierThanEcal) { - this.timeEarlierThanEcal = timeEarlierThanEcal; - } - - /** - * Set gain factor for raw energy (self-defined unit) of FADC hits - * - * @param gain factor for raw energy (self-defined unit) of FADC hits - */ - public void setGainFactor(double gainFactor) { - this.gainFactor = gainFactor; - } -} diff --git a/digi/src/main/java/org/hps/digi/nospacing/HodoscopeRawConverterNoSpacingReadoutDriver.java b/digi/src/main/java/org/hps/digi/nospacing/HodoscopeRawConverterNoSpacingReadoutDriver.java deleted file mode 100755 index 337b06926..000000000 --- 
a/digi/src/main/java/org/hps/digi/nospacing/HodoscopeRawConverterNoSpacingReadoutDriver.java +++ /dev/null @@ -1,78 +0,0 @@ -package org.hps.digi.nospacing; - -import java.awt.event.ActionEvent; -import java.awt.event.ActionListener; - -import org.hps.readout.ReadoutDriver; -//import org.hps.readout.RawConverterNoSpacingReadoutDriver; -import org.hps.readout.rawconverter.AbstractMode3RawConverter; -import org.hps.readout.rawconverter.HodoscopeReadoutMode3RawConverter; -import org.hps.record.daqconfig2019.ConfigurationManager2019; -import org.hps.record.daqconfig2019.DAQConfig2019; -import org.lcsim.geometry.Detector; -import org.lcsim.geometry.subdetector.Hodoscope_v1; - -/** - * HodoscopeRawConverterNoSpacingReadoutDriver is an - * implementation of {@link org.hps.readout.RawConverterReadoutDriver - * RawConverterReadoutDriver} for the hodoscope subdetector. - * - * @see org.hps.readout.RawConverterReadoutDriver - */ -public class HodoscopeRawConverterNoSpacingReadoutDriver extends RawConverterNoSpacingReadoutDriver { - /** - * The converter object responsible for processing raw hits into - * proper {@link org.lcsim.event.CalorimeterHit CalorimeterHit} - * objects. - */ - private HodoscopeReadoutMode3RawConverter converter = new HodoscopeReadoutMode3RawConverter(); - - /** - * Instantiates the driver with the correct default parameters. - */ - public HodoscopeRawConverterNoSpacingReadoutDriver() { - super("HodoscopeRawHits", "HodoscopeCorrectedHits"); - } - - /** - * Sets whether or not the DAQ configuration is applied into the driver - * the EvIO data stream or whether to read the configuration from data files. - * - * @param state - true indicates that the DAQ configuration is - * applied into the readout system, and false that it - * is not applied into the readout system. - */ - public void setDaqConfigurationAppliedintoReadout(boolean state) { - // Track changes in the DAQ configuration. 
- if (state) { - ConfigurationManager2019.addActionListener(new ActionListener() { - @Override - public void actionPerformed(ActionEvent e) { - // Get the DAQ configuration. - DAQConfig2019 daq = ConfigurationManager2019.getInstance(); - - // Load the DAQ settings from the configuration manager. - getConverter().setNumberSamplesAfter(daq.getHodoFADCConfig().getNSA()); - getConverter().setNumberSamplesBefore(daq.getHodoFADCConfig().getNSB()); - - // Get the FADC configuration. - getConverter().setFADCConfigHodo2019(daq.getHodoFADCConfig()); - } - }); - } - } - - @Override - protected AbstractMode3RawConverter getConverter() { - return converter; - } - - @Override - protected String getSubdetectorReadoutName(Detector detector) { - Hodoscope_v1 hodoscopeGeometry = (Hodoscope_v1) detector.getSubdetector("Hodoscope"); - return hodoscopeGeometry.getReadout().getName(); - } - - @Override - protected void updateDetectorDependentParameters(Detector detector) { } -} diff --git a/digi/src/main/java/org/hps/digi/nospacing/NoSpacingTriggerDriver.java.donothing b/digi/src/main/java/org/hps/digi/nospacing/NoSpacingTriggerDriver.java.donothing deleted file mode 100755 index df2678722..000000000 --- a/digi/src/main/java/org/hps/digi/nospacing/NoSpacingTriggerDriver.java.donothing +++ /dev/null @@ -1,159 +0,0 @@ -package org.hps.digi.nospacing; - -import org.hps.readout.ReadoutDataManager; -import org.hps.readout.ReadoutDriver; - -/** - * Class NoSpacingTriggerDriver is a special subclass of {@link - * org.hps.readout.ReadoutDriver ReadoutDriver} that is responsible - * for simulating trigger behavior. It implements additional behavior - * for handling trigger dead times and issuing triggers to the {@link - * org.hps.readout.ReadoutDataManager ReadoutDataManager}.

- * Implementing drivers are responsible for checking if trigger - * conditions are met. In the event that they are, the method {@link - * org.hps.readout.NoSpacingTriggerDriver#sendTrigger() sendTrigger()} should - * be used to issue the trigger to the data manager. This method will - * automatically check that the dead time condition is met, and will - * only issue the trigger command in the event that it is, so - * implementing drivers do not need to check this condition manually. - *

- * For usage instructions, please see ReadoutDriver. - * @see org.hps.readout.ReadoutDriver - */ -public abstract class NoSpacingTriggerDriver extends ReadoutDriver { - /** - * singles trigger types - */ - public static final String SINGLES0 = "singles0"; - public static final String SINGLES1 = "singles1"; - public static final String SINGLES2 = "singles2"; - public static final String SINGLES3 = "singles3"; - - public static final String TOP = "top"; - public static final String BOT = "bot"; - public static final String TOPBOT = "topbot"; - - public static final String PAIR0 = "pair0"; - public static final String PAIR1 = "pair1"; - public static final String PAIR2 = "pair2"; - public static final String PAIR3 = "pair3"; - - public static final String PULSER = "pulser"; - - public static final String FEE = "fee"; - - /** - * The amount of time that must pass after a trigger before a new - * trigger can be issued, in units of nanoseconds. - */ - private double deadTime = 0.0; - /** - * The last time at which a trigger was issued to the data - * manager, in units of nanoseconds. - */ - private double lastTrigger = Double.NaN; - - /** - * Checks whether the trigger is currently in dead time or not. - * @return Returns true if the trigger is currently - * in dead time, and false if it is not and a - * trigger may be issued. - */ - protected boolean isInDeadTime() { - if(Double.isNaN(lastTrigger)) { return false; } - else { return (lastTrigger + deadTime) > ReadoutDataManager.getCurrentTime(); } - } - - @Override - protected boolean isPersistent() { - throw new UnsupportedOperationException(); - } - - /** - * Gets the dead time for this trigger. - * @return Returns the dead time in units of nanoseconds. - */ - protected double getDeadTime() { - return deadTime; - } - - /** - * Gets the time at which the last trigger occurred. 
- * @return Returns the last trigger time in units of nanoseconds, - * or as {@link java.lang.Double#NaN Double.NaN} if no trigger - * has occurred yet. - */ - protected double getLastTriggerTime() { - return lastTrigger; - } - - @Override - protected double getReadoutWindowAfter() { - throw new UnsupportedOperationException(); - } - - @Override - protected double getReadoutWindowBefore() { - throw new UnsupportedOperationException(); - } - - /** - * Issues a trigger to the data manager so long as the trigger is - * not presently in dead time. - */ - protected void sendTrigger() { - if(!isInDeadTime()) { - ReadoutDataManager.sendTrigger(this); - lastTrigger = ReadoutDataManager.getCurrentTime(); - } - } - - /** - * Issues a trigger to the data manager so long as the trigger is - * not presently in dead time. - * @param trigger type - */ - protected void sendTrigger(String triggerType) { - if(!isInDeadTime()) { - ReadoutDataManager.sendTrigger(this, triggerType); - lastTrigger = ReadoutDataManager.getCurrentTime(); - } - } - - /** - * Issues a trigger to the data manager so long as the trigger is - * not presently in dead time. - * @param trigger type - * @param top/bot singles trigger - */ - protected void sendTrigger(String triggerType, String topBot) { - if(!isInDeadTime()) { - ReadoutDataManager.sendTrigger(this, triggerType, topBot); - lastTrigger = ReadoutDataManager.getCurrentTime(); - } - } - - /** - * Sets the dead time for the trigger. - * @param samples - The amount of time (in events) before another - * trigger is allowed to occur. 
- */ - public void setDeadTime(int samples) { - deadTime = samples * ReadoutDataManager.getBeamBunchSize(); - } - - @Override - public void setPersistent(boolean state) { - throw new UnsupportedOperationException(); - } - - @Override - public void setReadoutWindowAfter(double value) { - throw new UnsupportedOperationException(); - } - - @Override - public void setReadoutWindowBefore(double value) { - throw new UnsupportedOperationException(); - } -} diff --git a/digi/src/main/java/org/hps/digi/nospacing/RawConverterNoSpacingReadoutDriver.java b/digi/src/main/java/org/hps/digi/nospacing/RawConverterNoSpacingReadoutDriver.java deleted file mode 100755 index a13db77be..000000000 --- a/digi/src/main/java/org/hps/digi/nospacing/RawConverterNoSpacingReadoutDriver.java +++ /dev/null @@ -1,259 +0,0 @@ -package org.hps.digi.nospacing; - -import java.util.ArrayList; -import java.util.Collection; -import java.util.List; -import org.hps.readout.ReadoutDriver; -import org.hps.readout.rawconverter.AbstractMode3RawConverter; -import org.hps.readout.util.collection.LCIOCollectionFactory; -import org.lcsim.event.CalorimeterHit; -import org.lcsim.event.EventHeader; -import org.lcsim.event.RawCalorimeterHit; -import org.lcsim.geometry.Detector; -import org.lcsim.lcio.LCIOConstants; -import org.hps.readout.ReadoutDataManager; -/** - * RawConverterNoSpacingReadoutDriver processes ADC hit data - * objects and converts them to energy hit objects. It serves as an - * interface to a {@link - * org.hps.readout.rawconverter.AbstractMode3RawConverter - * AbstractMode3RawConverter} object, where the actual conversion is - * performed. - *

- * RawConverterNoSpacingReadoutDriver itself is abstract - it - * requires that implementing classes handle any subdetector-specific - * functionality. - */ -public abstract class RawConverterNoSpacingReadoutDriver extends ReadoutDriver { - /** - * Sets the name of the input {@link - * org.lcsim.event.RawCalorimeterHit RawCalorimeterHit} - * collection. - */ - private String inputCollectionName; - - /** - * Sets the name of the output {@link - * org.lcsim.event.CalorimeterHit CalorimeterHit} collection. - */ - private String outputCollectionName; - - /** - * Tracks the current local time in nanoseconds for this driver. - */ - private double localTime = 0.0; - - //size to look for hits in 4ns clock ticks - private double EVENT_WINDOW=48; - - /** - * Indicates whether channels that are marked as "bad" in the - * conditions database should be skipped when producing hits. - */ - protected boolean skipBadChannels = false; - - protected boolean checkInput = false; - - protected RawConverterNoSpacingReadoutDriver(String defaultInputCollectionName, String defaultOutputCollectionName) { - inputCollectionName = defaultInputCollectionName; - outputCollectionName = defaultOutputCollectionName; - } - - @Override - public final void detectorChanged(Detector detector) { - // Allow implementing drivers to catch the detector changed - // event, if needed. - updateDetectorDependentParameters(detector); - - // Update the converter. - getConverter().updateDetector(detector); - - // Update the readout name for the managed collection. - ReadoutDataManager.updateCollectionReadoutName(outputCollectionName, CalorimeterHit.class, getSubdetectorReadoutName(detector)); - } - - @Override - public final void process(EventHeader event) { - // Check the data management driver to determine whether the - // input collection is available or not. 
- if(checkInput&&!ReadoutDataManager.checkCollectionStatus(inputCollectionName, localTime + 4.0)) { - // System.out.println(this.getClass().getName()+":: checkInput or Collection status Failed"); - return; - } - - // Get all of the raw hits in the current clock-cycle. - // Collection rawHits = ReadoutDataManager.getData(localTime, localTime + 4.0, inputCollectionName, RawCalorimeterHit.class); - Collection rawHits = ReadoutDataManager.getData(ReadoutDataManager.getCurrentTime(), ReadoutDataManager.getCurrentTime() + 4.0*EVENT_WINDOW, inputCollectionName, RawCalorimeterHit.class); - // System.out.println("RawConverter:: "+ inputCollectionName+" local time = "+localTime+" number of seeds = "+rawHits.size()); - // Increment the local time. - // localTime += 4.0; - - // Pass the raw hits to the raw converter to obtain proper - // calorimeter hits. In readout, raw hits are always Mode-3, - // so there is no need to check the form. - List newHits = new ArrayList(); - - for(RawCalorimeterHit hit : rawHits) { - // Convert the raw hit. - CalorimeterHit newHit = getConverter().convertHit(hit, 0.0); - - // If the hit is on a bad channel, and these are set to - // be skipped, ignore the hit. Otherwise, add it to the - // output list. - if(skipBadChannels && isBadChannel(newHit.getCellID())) { - continue; - } - - // Add the new hit. - newHits.add(newHit); - } - // System.out.println("RawConverter:: "+ outputCollectionName+" adding new hits with size = "+newHits.size()+" at time = "+localTime); - // Add the calorimeter hit collection to the data manager. - ReadoutDataManager.addData(outputCollectionName, newHits, CalorimeterHit.class); - // Increment the local time for real. - localTime += 4.0*125; - - } - - @Override - public void startOfData() { - // Set the LCIO flags for the output collection. Flags are - // set to store the hit time and hit position respectively. 
- int flags = 0; - flags += 1 << LCIOConstants.RCHBIT_TIME; - flags += 1 << LCIOConstants.RCHBIT_LONG; - - // Define the LCSim collection parameters for this driver's - // output. - LCIOCollectionFactory.setCollectionName(outputCollectionName); - LCIOCollectionFactory.setProductionDriver(this); - LCIOCollectionFactory.setFlags(flags); - - // Set the dependencies for the driver and register its - // output collections with the data management driver. - addDependency(inputCollectionName); - - // Register the output collection. - ReadoutDataManager.registerCollection(LCIOCollectionFactory.produceLCIOCollection(CalorimeterHit.class), isPersistent(), - getReadoutWindowBefore(), getReadoutWindowAfter()); - } - - /** - * Gets the {@link org.hps.readout.ReadoutRawConverter - * ReadoutRawConverter} object used to convert hits for this - * subdetector. - * @return Returns the raw converter. - */ - protected abstract AbstractMode3RawConverter getConverter(); - - /** - * Gets the readout name for this subdetector from the geometry. - * @param detector - The geometry object. - * @return Returns the subdetector readout name. - */ - protected abstract String getSubdetectorReadoutName(Detector detector); - - @Override - protected final double getTimeDisplacement() { - return 0; - } - - @Override - protected final double getTimeNeededForLocalOutput() { - return 0; - } - - /** - * Indicates whether or not the channel on which a hit occurs is - * a "bad" channel according to the conditions database. - * @param hit - The hit to check. - * @return Returns true if the hit channel is - * flagged as "bad" and false otherwise. - * @throws UnsupportedOperationException Occurs if the - * subdetector represented by the driver does not support bad - * channel exclusion. 
- */ - protected boolean isBadChannel(long channelID) { - throw new UnsupportedOperationException("Driver \"" + getClass().getSimpleName() + "\" does not support bad channel exclusion."); - } - - /** - * Updates any detector-specific parameters needed by the - * implementing class. - * @param detector - The current detector geometry. - */ - protected abstract void updateDetectorDependentParameters(Detector detector); - - /** - * Sets the name of the input collection containing the objects - * of type {@link org.lcsim.event.RawCalorimeterHit - * RawCalorimeterHit} that are output by the digitization driver. - * @param collection - The name of the input raw hit collection. - */ - public void setInputCollectionName(String collection) { - inputCollectionName = collection; - } - - /** - * Sets the number of integration samples that should be included - * in a pulse integral after the threshold-crossing event. - * @param samples - The number of samples, where a sample is a - * 4 ns clock-cycle. - */ - public void setNumberSamplesAfter(int samples) { - getConverter().setNumberSamplesAfter(4 * samples); - } - - /** - * Sets the number of integration samples that should be included - * in a pulse integral before the threshold-crossing event. - * @param samples - The number of samples, where a sample is a - * 4 ns clock-cycle. - */ - public void setNumberSamplesBefore(int samples) { - getConverter().setNumberSamplesBefore(4 * samples); - } - - /** - * Sets factor of unit conversion for returned value of the method - * AbstractBaseRawConverter::adcToEnergy(). - * @param factor of unit conversion - */ - public void setFactorUnitConversion(double factor) { - getConverter().setFactorUnitConversion(factor); - } - - /** - * Sets the name of the output collection containing the objects - * of type {@link org.lcsim.event.CalorimeterHit CalorimeterHit} - * that are output by this driver. - * @param collection - The name of the output hit collection. 
- */ - public void setOutputCollectionName(String collection) { - outputCollectionName = collection; - } - - /** - * Indicates whether or not data from channels flagged as "bad" - * in the conditions system should be ignored. true - * indicates that they should be ignored, and false - * that they should not. - * @param apply - true indicates that "bad" channels - * will be ignored and false that they will not. - * @throws UnsupportedOperationException Occurs if the - * subdetector represented by the driver does not support bad - * channel exclusion. - */ - public void setSkipBadChannels(boolean state) { - throw new UnsupportedOperationException("Driver \"" + getClass().getSimpleName() + "\" does not support bad channel exclusion."); - } - - /** - * Sets the size of the ADC buffer. This is needed for proper - * handling of Mode-3 hits in the raw converter. - * @param window - The buffer size in units of 4 ns clock-cycles. - */ - public void setReadoutWindow(int window) { - getConverter().setWindowSamples(window); - } -} diff --git a/digi/src/main/java/org/hps/digi/nospacing/SinglesTrigger2019NoSpacingReadoutDriver.java b/digi/src/main/java/org/hps/digi/nospacing/SinglesTrigger2019NoSpacingReadoutDriver.java deleted file mode 100644 index bf7460e07..000000000 --- a/digi/src/main/java/org/hps/digi/nospacing/SinglesTrigger2019NoSpacingReadoutDriver.java +++ /dev/null @@ -1,415 +0,0 @@ -package org.hps.digi.nospacing; - -import java.util.Collection; -import java.util.List; -import java.awt.event.ActionEvent; -import java.awt.event.ActionListener; -import java.util.ArrayList; - -import org.hps.readout.ReadoutDataManager; -//import org.hps.digi.nospacing.NoSpacingTriggerDriver; -import org.hps.readout.TriggerDriver; -import org.hps.record.daqconfig2019.ConfigurationManager2019; -import org.hps.record.daqconfig2019.DAQConfig2019; -import org.hps.record.triggerbank.TriggerModule2019; -import org.lcsim.event.Cluster; -import org.lcsim.event.EventHeader; -import 
org.lcsim.geometry.Detector; -import org.lcsim.geometry.subdetector.HPSEcal3; -import org.lcsim.util.aida.AIDA; - -import org.hps.readout.util.HodoscopePattern; - -import hep.aida.IHistogram1D; -import hep.aida.IHistogram2D; - -/** - * SinglesTrigger2019NoSpacingReadoutDriver simulates an HPS singles trigger - * for 2019 MC. It takes in clusters produced by the - * {@link org.hps.readout.ecal.updated.GTPClusterReadoutDriver - * GTPClusterReadoutDriver} and hodoscope patterns produced by the - * {@link HodoscopePatternReadoutDriver}, and perform the necessary trigger - * logic on them. If a trigger is detected, it is sent to the readout data - * manager so that a triggered readout event may be written. - */ -public class SinglesTrigger2019NoSpacingReadoutDriver extends TriggerDriver { - // ============================================================== - // ==== LCIO Collections ======================================== - // ============================================================== - /** - * Indicates singles trigger type. Corresponding DAQ configuration is accessed by DAQ - * configuration system, and applied into readout. - */ - private String triggerType = "singles3"; - - /** - * Indicates the name of the calorimeter geometry object. This is - * needed to allow access to the calorimeter channel listings. - */ - private String ecalGeometryName = "Ecal"; - /** - * Specifies the name of the LCIO collection containing the input - * GTP clusters that are used for triggering. - */ - private String inputCollectionNameEcal = "EcalClustersGTP"; - - private String inputCollectionNameHodo = "HodoscopePatterns"; - - // ============================================================== - // ==== Driver Options ========================================== - // ============================================================== - - /** - * Specifies the beam energy for the input data. This defines the - * limits of the energy trigger plots and has no further effect. 
- */ - private double beamEnergy = 4.55; - /** - * Stores the trigger settings and performs trigger logic. - */ - private TriggerModule2019 triggerModule = new TriggerModule2019(); - - private double ecalTimeDisplacement = 20.0; //ns - private double hodoTimeDisplacement = 4.0; //ns - - boolean requireHodo=true; - - // ============================================================== - // ==== Driver Parameters ======================================= - // ============================================================== - - /** - * Tracks the current local time in nanoseconds for this driver. - */ - private double localTime = 0.0; - /** - * Stores a reference to the calorimeter subdetector model. This - * is needed to extract the crystal indices from the cell ID. - */ - private HPSEcal3 ecal = null; - /** - * Defines the size of an energy bin for trigger output plots. - */ - private static final double BIN_SIZE = 0.025; - - - // ============================================================== - // ==== AIDA Plots ============================================== - // ============================================================== - - private AIDA aida = AIDA.defaultInstance(); - private static final int NO_CUTS = 0; - private static final int WITH_CUTS = 1; - private IHistogram1D[] clusterSeedEnergy = new IHistogram1D[2]; - private IHistogram1D[] clusterHitCount = new IHistogram1D[2]; - private IHistogram1D[] clusterTotalEnergy = new IHistogram1D[2]; - private IHistogram2D[] clusterDistribution = new IHistogram2D[2]; - - /** - * Sets whether or not the DAQ configuration is applied into the driver - * the EvIO data stream or whether to read the configuration from data files. - * - * @param state - true indicates that the DAQ configuration is - * applied into the readout system, and false that it - * is not applied into the readout system. 
- */ - public void setDaqConfigurationAppliedintoReadout(boolean state) { - // If the DAQ configuration should be read, attach a listener - // to track when it updates. - if (state) { - ConfigurationManager2019.addActionListener(new ActionListener() { - @Override - public void actionPerformed(ActionEvent e) { - // Get the DAQ configuration. - DAQConfig2019 daq = ConfigurationManager2019.getInstance(); - if(triggerType.contentEquals(SINGLES3)) triggerModule.loadDAQConfiguration(daq.getVTPConfig().getSingles3Config()); - else if(triggerType.equals(SINGLES2)) triggerModule.loadDAQConfiguration(daq.getVTPConfig().getSingles2Config()); - else if(triggerType.equals(SINGLES1)) triggerModule.loadDAQConfiguration(daq.getVTPConfig().getSingles1Config()); - else if(triggerType.equals(SINGLES0)) triggerModule.loadDAQConfiguration(daq.getVTPConfig().getSingles0Config()); - } - }); - } - } - - @Override - public void detectorChanged(Detector detector) { - // Get the calorimeter sub-detector. - org.lcsim.geometry.compact.Subdetector ecalSub = detector.getSubdetector(ecalGeometryName); - if(ecalSub instanceof HPSEcal3) { - ecal = (HPSEcal3) ecalSub; - } else { - throw new IllegalStateException("Error: Unexpected calorimeter sub-detector of type \"" + ecalSub.getClass().getSimpleName() + "; expected HPSEcal3."); - } - } - - @Override - public void process(EventHeader event) { - // Check that clusters are available for the trigger. 
- Collection clusters = null; - Collection hodoPatterns = null; - ArrayList hodoPatternList = null; - // System.out.println(this.getClass().getName()+":: starting process"); - if(ReadoutDataManager.checkCollectionStatus(inputCollectionNameEcal, ReadoutDataManager.getCurrentTime()-ecalTimeDisplacement) && ReadoutDataManager.checkCollectionStatus(inputCollectionNameHodo, ReadoutDataManager.getCurrentTime()-hodoTimeDisplacement)) { - clusters = ReadoutDataManager.getData(ReadoutDataManager.getCurrentTime()-ecalTimeDisplacement, ReadoutDataManager.getCurrentTime() -ecalTimeDisplacement+ 192.0, inputCollectionNameEcal, Cluster.class); - hodoPatterns = ReadoutDataManager.getData(ReadoutDataManager.getCurrentTime()-hodoTimeDisplacement, ReadoutDataManager.getCurrentTime() -hodoTimeDisplacement+ 192.0, inputCollectionNameHodo, HodoscopePattern.class); - - // System.out.println(this.getClass().getName()+":: number of gtp clusters = "+clusters.size()); - // System.out.println(this.getClass().getName()+":: number of hodo patterns = "+hodoPatterns.size()); - if(clusters.size() == 0){ - // System.out.println(this.getClass().getName()+":: quitting because no gtp clusters"); - return; - } - - if( requireHodo&&hodoPatterns.size() == 0){ - // System.out.println(this.getClass().getName()+":: quitting because no hodo patterns"); - return; - } - hodoPatternList = new ArrayList<>(hodoPatterns); - - } else { - System.out.println(this.getClass().getName()+":: cluster or hodo collection doesn't exist"); - return; - } - - // Track whether or not a trigger was seen. - boolean triggered = false; - - // There is no need to perform the trigger cuts if the - // trigger is in dead time, as no trigger may be issued - // regardless of the outcome. - if(isInDeadTime()) { - System.out.println(this.getClass().getName()+":: I'm in deadtime ... 
bailing"); - return; - } - - // Record top/bot status for singles triggers - List topBot = new ArrayList(); - - // Plot the trigger distributions before trigger cuts are - // performed. - for(Cluster cluster : clusters) { - // Get the x and y indices. Note that LCSim meta data is - // not available during readout, so crystal indices must - // be obtained directly from the calorimeter geometry. - java.awt.Point ixy = ecal.getCellIndices(cluster.getCalorimeterHits().get(0).getCellID()); - - // Populate the uncut plots. - clusterSeedEnergy[NO_CUTS].fill(TriggerModule2019.getValueClusterSeedEnergy(cluster)); - clusterTotalEnergy[NO_CUTS].fill(TriggerModule2019.getValueClusterTotalEnergy(cluster)); - clusterHitCount[NO_CUTS].fill(TriggerModule2019.getClusterHitCount(cluster)); - clusterDistribution[NO_CUTS].fill(ixy.x, ixy.y); - - // Perform the hit count cut. - if(triggerModule.getCutEn(TriggerModule2019.CLUSTER_HIT_COUNT_LOW_EN) && !triggerModule.clusterHitCountCut(cluster)) { - // System.out.println(this.getClass().getName()+":: this cluster has too few hits ... continue"); - continue; - } - - // Perform the cluster energy cut. - if(triggerModule.getCutEn(TriggerModule2019.CLUSTER_TOTAL_ENERGY_LOW_EN) && !triggerModule.clusterTotalEnergyCutLow(cluster)) { - // System.out.println(this.getClass().getName()+":: this cluster has too low an energy ... continue"); - - continue; - } - - if(triggerModule.getCutEn(TriggerModule2019.CLUSTER_TOTAL_ENERGY_HIGH_EN) && !triggerModule.clusterTotalEnergyCutHigh(cluster)) { - // System.out.println(this.getClass().getName()+":: this cluster has too HIGH an energy ... continue"); - - continue; - } - // System.out.println(this.getClass().getName()+":: this cluster survived!"); - // In the setup calorimeter geometry, range of X coordinates is [-23, -1] and [1, 23]. - // The hardware uses cluster X coordinates [-22,0] and [1,23]. - int clusterX = ixy.x; - if(clusterX < 0) clusterX++; - - int clusterY = ixy.y; - - // XMin is at least 0. 
- if(!triggerModule.getCutEn(TriggerModule2019.SINGLES_MOLLERMODE_EN)) { - if(triggerModule.getCutEn(TriggerModule2019.CLUSTER_XMIN_EN) && !triggerModule.clusterXMinCut(clusterX)) { - // System.out.println(this.getClass().getName()+":: trigger type = "+triggerType+" no trigger because cluster X failed"); - continue; - } - - // XMin cut has been applied. - if(triggerModule.getCutEn(TriggerModule2019.CLUSTER_PDE_EN) && !triggerModule.clusterPDECut(cluster, clusterX)) { - // System.out.println(this.getClass().getName()+":: trigger type = "+triggerType+" no trigger because cluster Energy vs X failed"); - continue; - } - } - - if(triggerModule.getCutEn(TriggerModule2019.SINGLES_L1L2ECAL_MATCHING_EN) && !triggerModule.geometryMatchingCut(clusterX, ixy.y, hodoPatternList)) { - // System.out.println(this.getClass().getName()+":: trigger type = "+triggerType+"no trigger because hodo matching failed"); - continue; - } - - //For 2021 update, Moller triggers - if(triggerModule.getCutEn(TriggerModule2019.SINGLES_MOLLERMODE_EN)) { - if(triggerModule.getCutEn(TriggerModule2019.SINGLES_XYMINMAX_EN) && !triggerModule.clusterXMinCut(clusterX)) { - continue; - } - if(triggerModule.getCutEn(TriggerModule2019.SINGLES_XYMINMAX_EN) && !triggerModule.clusterXMaxCut(clusterX)) { - continue; - } - if(triggerModule.getCutEn(TriggerModule2019.SINGLES_XYMINMAX_EN) && !triggerModule.clusterYMinCut(clusterY)) { - continue; - } - if(triggerModule.getCutEn(TriggerModule2019.SINGLES_XYMINMAX_EN) && !triggerModule.clusterYMaxCut(clusterY)) { - continue; - } - if(triggerModule.getCutEn(TriggerModule2019.CLUSTER_PDE_EN) && !triggerModule.clusterMollerPDECut(cluster, clusterX)) { - continue; - } - } - // System.out.println(this.getClass().getName()+":: found a trigger!!!"); - - // Note that a trigger occurred. - triggered = true; - - if(ixy.y > 0) topBot.add(TOP); - else topBot.add(BOT); - - // Populate the cut plots. 
- clusterSeedEnergy[WITH_CUTS].fill(TriggerModule2019.getValueClusterSeedEnergy(cluster)); - clusterTotalEnergy[WITH_CUTS].fill(TriggerModule2019.getValueClusterTotalEnergy(cluster)); - clusterHitCount[WITH_CUTS].fill(TriggerModule2019.getClusterHitCount(cluster)); - clusterDistribution[WITH_CUTS].fill(ixy.x, ixy.y); - } - - if(triggered) { - boolean topStat = false; - boolean botStat = false; - if(topBot.contains(TOP)) topStat = true; - if(topBot.contains(BOT)) botStat = true; - // System.out.println(this.getClass().getName()+":: Sending Trigger"); - if(topStat && botStat) sendTrigger(triggerType, TOPBOT); - else if(topStat) sendTrigger(triggerType, TOP); - else sendTrigger(triggerType, BOT); - } - } - - @Override - public void startOfData() { - // Define the driver collection dependencies. - addDependency(inputCollectionNameEcal); - - addDependency(inputCollectionNameHodo); - - // Register the trigger. - ReadoutDataManager.registerTrigger(this); - - // Set the plot range based on the beam energy. - int bins = (int) Math.ceil((beamEnergy * 1.1) / BIN_SIZE); - double xMax = bins * BIN_SIZE; - - // Instantiate the trigger plots. - String[] postscripts = { " (No Cuts)", " (With Cuts)" }; - for(int i = NO_CUTS; i <= WITH_CUTS; i++) { - clusterSeedEnergy[i] = aida.histogram1D("Trigger Plots\\Cluster Seed Energy Distribution" + postscripts[i], bins, 0.0, xMax); - clusterHitCount[i] = aida.histogram1D("Trigger Plots\\Cluster Hit Count Distribution" + postscripts[i], 10, -0.5, 9.5); - clusterTotalEnergy[i] = aida.histogram1D("Trigger Plots\\Cluster Total Energy Distribution" + postscripts[i], bins, 0.0, xMax); - clusterDistribution[i] = aida.histogram2D("Trigger Plots\\Cluster Seed Distribution" + postscripts[i], 46, -23, 23, 11, -5.5, 5.5); - } - - // Run the superclass method. 
- super.startOfData(); - } - - @Override - protected double getTimeDisplacement() { - return 0; - } - - @Override - protected double getTimeNeededForLocalOutput() { - return 0; - } - - /** - * Defines the name of the calorimeter geometry specification. By - * default, this is "Ecal". - * @param ecalName - The calorimeter name. - */ - public void setEcalGeometryName(String value) { - ecalGeometryName = value; - } - - /** - * Sets the name of the LCIO collection from which clusters are - * drawn. - * @param collection - The name of the LCIO collection. - */ - public void setInputCollectionNameEcal(String collection) { - inputCollectionNameEcal = collection; - } - - public void setInputCollectionNameHodo(String collection) { - inputCollectionNameHodo = collection; - } - - public void setTriggerType(String trigger) { - if(!trigger.equals(SINGLES0) && !trigger.equals(SINGLES1) && !trigger.equals(SINGLES2) && !trigger.equals(SINGLES3)) - throw new IllegalArgumentException("Error: wrong trigger type name \"" + trigger + "\"."); - triggerType = trigger; - } - - /** - * Sets the beam energy for the trigger. This is only used to - * determine the range of the x-axis for trigger plots. - * @param value - The beam energy of the input data, in units of - * GeV. - */ - public void setBeamEnergy(double value) { - beamEnergy = value; - } - - /** - * Sets the minimum hit count threshold for the trigger. This - * value is inclusive. - * @param hitCountThreshold - The value of the threshold. - */ - public void setHitCountThreshold(int hitCountThreshold) { - triggerModule.setCutValue(TriggerModule2019.CLUSTER_HIT_COUNT_LOW, hitCountThreshold); - } - - /** - * Sets the lower bound for the cluster energy threshold on the - * trigger. This value is inclusive. - * @param clusterEnergyLow - The value of the threshold. 
- */ - public void setClusterEnergyLowThreshold(double clusterEnergyLow) { - triggerModule.setCutValue(TriggerModule2019.CLUSTER_TOTAL_ENERGY_LOW, clusterEnergyLow); - } - - /** - * Sets the upper bound for the cluster energy threshold on the - * trigger. This value is inclusive. - * @param clusterEnergyHigh - The value of the threshold. - */ - public void setClusterEnergyHighThreshold(double clusterEnergyHigh) { - triggerModule.setCutValue(TriggerModule2019.CLUSTER_TOTAL_ENERGY_HIGH, clusterEnergyHigh); - } - - - public void setClusterXMin(double xMin) { - triggerModule.setCutValue(TriggerModule2019.CLUSTER_XMIN, xMin); - } - - public void setClusterPDEC0(double pdeC0) { - triggerModule.setCutValue(TriggerModule2019.CLUSTER_PDE_C0, pdeC0); - } - - public void setClusterPDEC1(double pdeC1) { - triggerModule.setCutValue(TriggerModule2019.CLUSTER_PDE_C1, pdeC1); - } - - public void setClusterPDEC2(double pdeC2) { - triggerModule.setCutValue(TriggerModule2019.CLUSTER_PDE_C2, pdeC2); - } - - public void setClusterPDEC3(double pdeC3) { - triggerModule.setCutValue(TriggerModule2019.CLUSTER_PDE_C3, pdeC3); - } - -} diff --git a/digi/src/main/java/org/hps/digi/nospacing/SvtDigiWithPulserNoSpacingReadoutDriver.java b/digi/src/main/java/org/hps/digi/nospacing/SvtDigiWithPulserNoSpacingReadoutDriver.java deleted file mode 100755 index f8673ec37..000000000 --- a/digi/src/main/java/org/hps/digi/nospacing/SvtDigiWithPulserNoSpacingReadoutDriver.java +++ /dev/null @@ -1,867 +0,0 @@ -package org.hps.digi.nospacing; - -import java.util.ArrayList; -import java.util.Collection; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.PriorityQueue; -import java.util.Set; -import org.hps.conditions.database.DatabaseConditionsManager; -import org.hps.conditions.svt.SvtTimingConstants; -import org.hps.readout.svt.HPSSVTConstants; -import org.lcsim.detector.tracker.silicon.ChargeCarrier; -import org.lcsim.detector.tracker.silicon.HpsSiSensor; -import 
org.lcsim.detector.tracker.silicon.SiSensor; -import org.lcsim.geometry.Detector; -import org.lcsim.lcio.LCIOConstants; -import org.lcsim.event.EventHeader; -import org.lcsim.event.LCRelation; -import org.lcsim.event.MCParticle; -import org.lcsim.event.RawTrackerHit; -import org.lcsim.event.SimTrackerHit; -import org.lcsim.event.base.BaseLCRelation; -import org.lcsim.event.base.BaseRawTrackerHit; -import org.lcsim.recon.tracking.digitization.sisim.CDFSiSensorSim; -import org.lcsim.recon.tracking.digitization.sisim.SiElectrodeData; -import org.lcsim.recon.tracking.digitization.sisim.SiElectrodeDataCollection; -import org.lcsim.recon.tracking.digitization.sisim.SiSensorSim; -import org.lcsim.recon.tracking.digitization.sisim.config.SimTrackerHitReadoutDriver; -import org.hps.readout.ReadoutDataManager; -import org.hps.readout.ReadoutDriver; -import org.hps.readout.ReadoutTimestamp; -import org.hps.readout.util.collection.LCIOCollection; -import org.hps.readout.util.collection.LCIOCollectionFactory; -import org.hps.readout.util.collection.TriggeredLCIOData; -import org.hps.recon.tracking.PulseShape; -import org.hps.util.RandomGaussian; - -/** - * SVT readout simulation. 
- * - * @author Sho Uemura - */ -public class SvtDigiWithPulserNoSpacingReadoutDriver extends ReadoutDriver { - //-----------------// - //--- Constants ---// - //-----------------// - private static final String SVT_SUBDETECTOR_NAME = "Tracker"; - private PulseShape shape = new PulseShape.FourPole(); - - private SimTrackerHitReadoutDriver readoutDriver = new SimTrackerHitReadoutDriver(); - private SiSensorSim siSimulation = new CDFSiSensorSim(); - private Map[]> hitMap = new HashMap[]>(); - private Map[]> pulserHitMap = new HashMap[]>(); - private List sensors = null; - - // readout period time offset in ns - private double readoutOffset = 0.0; - private double readoutLatency = 280.0; - // private double pileupCutoff = 300.0; - private double pileupCutoff = 0.0; - private String readout = "TrackerHits"; - private double timeOffset = 30.0; - private boolean noPileup = false; - private boolean addNoise = true; - - private boolean useTimingConditions = false; - - // cut settings - private boolean enableThresholdCut = true; - private int samplesAboveThreshold = 3; - private double noiseThreshold = 2.0; - private boolean enablePileupCut = true; - private boolean dropBadChannels = true; - private boolean debug_=false; - - // Collection Names - private String outputCollection = "SVTRawTrackerHits"; - private String relationCollection = "SVTTrueHitRelations"; - - private LCIOCollection trackerHitCollectionParams; - private LCIOCollection truthRelationsCollectionParams; - private LCIOCollection truthHitsCollectionParams; - /** - * The name of the input {@link org.lcsim.event.RawTrackerHit - * RawTrackerHit} collection from pulser data. - */ - private String pulserDataCollectionName = "SVTRawTrackerHits"; - - public SvtDigiWithPulserNoSpacingReadoutDriver() { - add(readoutDriver); - } - - /** - * Indicates whether or not noise should be simulated when analog - * hits are generated. 
- * @param addNoise - true adds noise simulation to - * analog hits, while false uses only contributions - * from pulses generated from truth data. - */ - public void setAddNoise(boolean addNoise) { - this.addNoise = addNoise; - } - - /** - * Indicates whether hits consistent with pile-up effects should - * be dropped or not. A hit is considered to be consistent with - * pile-up effects if its earlier sample indices are larger than - * the later ones, suggesting that it includes the trailing end - * of another pulse from earlier in time. - * @param enablePileupCut - true enables the cut and - * drops pile-up hits, while false disables the cut - * and retains them. - */ - public void setEnablePileupCut(boolean enablePileupCut) { - this.enablePileupCut = enablePileupCut; - } - - /** - * Indicates whether noisy analog hits should be retained in - * readout. Hits are required to have a certain number of samples - * that exceeds a programmable noise threshold. The required - * number of samples may be set by the method {@link - * org.hps.readout.svt.SvtDigiWithPulserNoSpacingReadoutDriver#setSamplesAboveThreshold(int) - * setSamplesAboveThreshold(int)} and the noise threshold may be - * set with the method {@link - * org.hps.readout.svt.SvtDigiWithPulserNoSpacingReadoutDriver#setNoiseThreshold(double) - * setNoiseThreshold(double)}. - * @param enableThresholdCut - true enables the cut - * and drops noisy hits, while false disables the - * cut and retains them. - */ - public void setEnableThresholdCut(boolean enableThresholdCut) { - this.enableThresholdCut = enableThresholdCut; - } - - /** - * Sets the noise threshold used in conjunction with the sample - * threshold cut. The cut is enabled or disabled via the method - * {@link - * org.hps.readout.svt.SvtDigiWithPulserNoSpacingReadoutDriver#setEnableThresholdCut(boolean) - * setEnableThresholdCut(boolean)}. - * @param noiseThreshold - The noise threshold. 
- */ - public void setNoiseThreshold(double noiseThreshold) { - this.noiseThreshold = noiseThreshold; - } - - /** - * Sets the number of smaples that must be above the noise - * threshold as employed by the sample threshold cut. The cut is - * enabled or disabled via the method {@link - * org.hps.readout.svt.SvtDigiWithPulserNoSpacingReadoutDriver#setEnableThresholdCut(boolean) - * setEnableThresholdCut(boolean)}. - * @param samplesAboveThreshold - The number of samples. Only six - * samples are used, so values above six will result in every hit - * being rejected. Values of zero or lower will result in the - * acceptance of every hit. Threshold cut is inclusive. - */ - public void setSamplesAboveThreshold(int samplesAboveThreshold) { - this.samplesAboveThreshold = samplesAboveThreshold; - } - - /** - * Indicates whether pile-up should be simulated. If set to - * false, analog hits are generated from the truth - * hits of a given event individually, with no contribution from - * neighboring events included. If set to true, data - * from multiple events is included. - * @param noPileup - true uses data from neighboring - * events when generating analog hits, while false - * uses only contributions from a single event. - */ - public void setNoPileup(boolean noPileup) { - this.noPileup = noPileup; - } - - /** - * Specifies whether analog hits which occur on "bad" channels - * should be included in readout data or not. - * @param dropBadChannels - true means that "bad" - * channel hits will be excluded from readout, while - * false means that they will be retained. - */ - public void setDropBadChannels(boolean dropBadChannels) { - this.dropBadChannels = dropBadChannels; - } - - /** - * Set the readout latency. This does not directly correspond to - * any internal function in the readout simulation, but affects - * what range of SVT ADC values are output around the trigger. It - * is retained to allow a matching to the hardware function. 
- * @param readoutLatency - The readout latency to use. - */ - public void setReadoutLatency(double readoutLatency) { - this.readoutLatency = readoutLatency; - } - - /** - * Sets whether to use manually defined timing conditions, or if - * they should be loaded from the conditions database. - * @param useTimingConditions - true uses the values - * from the database, and false the manually defined - * values. - */ - public void setUseTimingConditions(boolean useTimingConditions) { - this.useTimingConditions = useTimingConditions; - } - - /** - * Sets the pulse shape to be used when emulating the analog hit - * response. Valid options are CRRC and - * FourPole. - * @param pulseShape - The pulse shape to be used. - */ - public void setPulseShape(String pulseShape) { - switch (pulseShape) { - case "CR-RC": - shape = new PulseShape.CRRC(); - break; - case "FourPole": - shape = new PulseShape.FourPole(); - break; - default: - throw new RuntimeException("Unrecognized pulseShape: " + pulseShape); - } - } - /** - * Sets the name of the input pulser data collection name. - * @param collection - The collection name. - */ - public void setPulserDataCollectionName(String collection) { - this.pulserDataCollectionName = collection; - } - - @Override - public void detectorChanged(Detector detector) { - // TODO: What does this "SimTrackerHitReadoutDriver" do? - String[] readouts = { readout }; - readoutDriver.setCollections(readouts); - - // Get the collection of all silicon sensors from the SVT. - sensors = detector.getSubdetector(SVT_SUBDETECTOR_NAME).getDetectorElement().findDescendants(HpsSiSensor.class); - - // If pile-up simulation is disabled, instantiate all - // possible processing queues. For the pile-up simulation, - // these are generated as needed. 
- if(!noPileup) { - for(HpsSiSensor sensor : sensors) { - @SuppressWarnings("unchecked") - int nChans=640; - if(sensor.getNumberOfChannels()==510) - nChans=512; - //really dumb way to account for channels not read out - PriorityQueue[] hitQueues = new PriorityQueue[nChans]; - PriorityQueue[] pulserHitQueues = new PriorityQueue[nChans]; - hitMap.put(sensor, hitQueues); - pulserHitMap.put(sensor, pulserHitQueues); - } - } - - // Load timing conditions from the conditions database, if - // this is requested. - if(useTimingConditions) { - SvtTimingConstants timingConstants = DatabaseConditionsManager.getInstance().getCachedConditions(SvtTimingConstants.SvtTimingConstantsCollection.class, "svt_timing_constants").getCachedData().get(0); - readoutOffset = 4 * (timingConstants.getOffsetPhase() + 3); - // readoutLatency = 248.0 + timingConstants.getOffsetTime(); - readoutLatency = readoutLatency + timingConstants.getOffsetTime(); - System.out.println(this.getClass().getName()+":: readout offset = "+readoutOffset+" latency = "+readoutLatency); - } - } - - @Override - public void process(EventHeader event) { - super.process(event); - // get the pulser hits - Collection rawHits = ReadoutDataManager.getData(ReadoutDataManager.getCurrentTime(), ReadoutDataManager.getCurrentTime() + 2.0, pulserDataCollectionName, RawTrackerHit.class); - // Generate the truth hits. - List stripHits = doSiSimulation(); - List pulserStripHits=makePulserStripHits(rawHits); - if(debug_){ - System.out.println("In SvtDigi:: Current time is = "+ReadoutDataManager.getCurrentTime()); - System.out.println("Number of Sim StripHits for this bunch is "+stripHits.size()); - } - - if(!noPileup) { - // Process each of the pulser hits - for (StripHit pulserHit : pulserStripHits) { - // Get the sensor and channel for the pulser hit. - HpsSiSensor sensor = (HpsSiSensor) pulserHit.sensor; - int channel = pulserHit.channel; - // Queue the hit in the processing queue appropriate - // to its sensor and channel. 
- PriorityQueue[] pulserHitQueues = pulserHitMap.get(sensor); - if(pulserHitQueues[channel] == null) { - pulserHitQueues[channel] = new PriorityQueue(); - } - pulserHitQueues[channel].add(pulserHit); - } - - // Process each of the truth hits - for (StripHit stripHit : stripHits) { - // Get the sensor and channel for the truth hit. - HpsSiSensor sensor = (HpsSiSensor)stripHit.sensor; - int channel = stripHit.channel; - // Queue the hit in the processing queue appropriate - // to its sensor and channel. - PriorityQueue[] hitQueues = hitMap.get(sensor); - if(hitQueues[channel] == null) { - hitQueues[channel] = new PriorityQueue(); - } - hitQueues[channel].add(stripHit); - } - - // Hits older than a certain time frame should no longer - // be used for pile-up simulation and should be removed - // from the processing queues. - for(SiSensor sensor : sensors) { - // Get the processing queue for the current sensor. - PriorityQueue[] pulserHitQueues = pulserHitMap.get(sensor); - // Check each hit to see if it is still in-time. - for(int i = 0; i < pulserHitQueues.length; i++) { - if(pulserHitQueues[i] != null) { - // Remove old hits. - while(!pulserHitQueues[i].isEmpty() && pulserHitQueues[i].peek().time < ReadoutDataManager.getCurrentTime() - (readoutLatency + pileupCutoff)) { - pulserHitQueues[i].poll(); - } - // If the queue is empty, remove it. - if(pulserHitQueues[i].isEmpty()) { pulserHitQueues[i] = null; } - } - } - - // Get the processing queue for the current sensor. - PriorityQueue[] hitQueues = hitMap.get(sensor); - // Check each hit to see if it is still in-time. - for(int i = 0; i < hitQueues.length; i++) { - if(hitQueues[i] != null) { - // Remove old hits. - while(!hitQueues[i].isEmpty() && hitQueues[i].peek().time < ReadoutDataManager.getCurrentTime() - (readoutLatency + pileupCutoff)) { - hitQueues[i].poll(); - } - // If the queue is empty, remove it. 
- if(hitQueues[i].isEmpty()) { hitQueues[i] = null; } - } - } - } - } - // Otherwise, process the hits for a no pile-up simulation. - // When no pile-up is simulated, hits are fully processed and - // output on an event-by-event basis. - else { - // Create a list to hold the analog data. - List hits = new ArrayList(); - - // Process each of the truth hits. - for(StripHit stripHit : stripHits) { - // Get the hit parameters. - HpsSiSensor sensor = (HpsSiSensor) stripHit.sensor; - short[] samples = new short[6]; - - // Create a signal buffer and populate it with the - // appropriate pedestal values. - double[] signal = new double[6]; - for(int sampleN = 0; sampleN < 6; sampleN++) { - signal[sampleN] = sensor.getPedestal(stripHit.channel, sampleN); - } - - // If noise should be added, do so. - if(addNoise) { - addNoise(sensor, stripHit.channel, signal); - } - - // Emulate the pulse response and add it to the - // sample array. - for(int sampleN = 0; sampleN < 6; sampleN++) { - double time = sampleN * HPSSVTConstants.SAMPLING_INTERVAL - timeOffset; - shape.setParameters(stripHit.channel, (HpsSiSensor) sensor); - signal[sampleN] += stripHit.amplitude * shape.getAmplitudePeakNorm(time); - samples[sampleN] = (short) Math.round(signal[sampleN]); - } - - // Create raw tracker hits from the sample data. - long channel_id = sensor.makeChannelID(stripHit.channel); - RawTrackerHit hit = new BaseRawTrackerHit(0, channel_id, samples, new ArrayList(stripHit.simHits), sensor); - - // If the analog hit passes the readout cuts, it may - // be added to the data stream. - if(readoutCuts(hit)) { hits.add(hit); } - } - - // Output the processed hits to the LCIO stream. - ReadoutDataManager.addData(outputCollection, hits, RawTrackerHit.class); - } - } - - @Override - public void startOfData() { - // The output collection is only handled by the readout data - // manager if no pile-up simulation is included. Otherwise, - // the driver outputs its own collection at readout. 
- if(noPileup) { - LCIOCollectionFactory.setCollectionName(outputCollection); - LCIOCollectionFactory.setProductionDriver(this); - LCIOCollectionFactory.setFlags(1 << LCIOConstants.TRAWBIT_ID1); - LCIOCollectionFactory.setReadoutName(readout); - LCIOCollection noPileUpCollectionParams = LCIOCollectionFactory.produceLCIOCollection(RawTrackerHit.class); - ReadoutDataManager.registerCollection(noPileUpCollectionParams, true, 8.0, 32.0); - } - addDependency(pulserDataCollectionName); - // Define the LCSim on-trigger collection parameters. - LCIOCollectionFactory.setCollectionName(outputCollection); - LCIOCollectionFactory.setProductionDriver(this); - LCIOCollectionFactory.setFlags(1 << LCIOConstants.TRAWBIT_ID1); - LCIOCollectionFactory.setReadoutName(readout); - trackerHitCollectionParams = LCIOCollectionFactory.produceLCIOCollection(RawTrackerHit.class); - - LCIOCollectionFactory.setCollectionName(relationCollection); - LCIOCollectionFactory.setProductionDriver(this); - truthRelationsCollectionParams = LCIOCollectionFactory.produceLCIOCollection(LCRelation.class); - - LCIOCollectionFactory.setCollectionName("TrackerHits"); - LCIOCollectionFactory.setFlags(0xc0000000); - LCIOCollectionFactory.setProductionDriver(this); - LCIOCollectionFactory.setReadoutName("TrackerHits"); - truthHitsCollectionParams = LCIOCollectionFactory.produceLCIOCollection(SimTrackerHit.class); - - // Run the superclass method. - super.startOfData(); - } - - /** - * Performs a simulation of silicon sensor response and generates - * a collection of {@link - * org.hps.readout.svt.SvtDigiWithPulserNoSpacingReadoutDriver.StripHit StripHit} - * objects representing the detector response. - * @return Returns a collection of StripHit objects describing - * the detector response for the current event. - */ - private List doSiSimulation() { - // Create a list to store the simulated hit objects. - List stripHits = new ArrayList(); - - // Process each of the SVT sensors. 
- for(SiSensor sensor : sensors) { - // Set the sensor to be used in the charge deposition - // simulation. - siSimulation.setSensor(sensor); - // Perform the charge deposition simulation. - Map electrodeDataMap = siSimulation.computeElectrodeData(); - - // Iterate over all possible charge carriers. - for(ChargeCarrier carrier : ChargeCarrier.values()) { - // If the sensor is capable of collecting the given - // charge carrier, then obtain the electrode data for - // the sensor. - if(sensor.hasElectrodesOnSide(carrier)) { - // Attempt to obtain electrode data. - SiElectrodeDataCollection electrodeDataCol = electrodeDataMap.get(carrier); - - // If there is no electrode data available create - // a new instance of electrode data. - if(electrodeDataCol == null) { - electrodeDataCol = new SiElectrodeDataCollection(); - } - - // Loop over all sensor channels. - for(Integer channel : electrodeDataCol.keySet()) { - // Get the electrode data for this channel. - SiElectrodeData electrodeData = electrodeDataCol.get(channel); - Set simHits = electrodeData.getSimulatedHits(); - - // Compute hit time as the unweighted average - // of SimTrackerHit times; this is dumb but - // okay since there's generally only one - // SimTrackerHit. - double time = 0.0; - for(SimTrackerHit hit : simHits) { - time += hit.getTime(); - } - time /= simHits.size(); - time += ReadoutDataManager.getCurrentTime(); - - // Get the charge in units of electrons. - double charge = electrodeData.getCharge(); - - // Calculate the amplitude. - double resistorValue = 100; // Ohms - double inputStageGain = 1.5; - // FIXME: This should use the gains instead - double amplitude = (charge / HPSSVTConstants.MIP) * resistorValue * inputStageGain * Math.pow(2, 14) / 2000; - - // Generate a StripHit object containing the - // simulation data and add it to the list. 
- stripHits.add(new StripHit(sensor, channel, amplitude, time, simHits)); - } - } - } - - // Clear the sensors of all deposited charge - siSimulation.clearReadout(); - } - - // Return the collection of StripHit objects. - return stripHits; - } - - private List makePulserStripHits(Collection rawHits) { - // Create a list to store the simulated hit objects. - List stripHits = new ArrayList(); - for (RawTrackerHit hit: rawHits){ - SiSensor sensor=(SiSensor) hit.getDetectorElement(); - int strip = hit.getIdentifierFieldValue("strip"); - double time=ReadoutDataManager.getCurrentTime(); - stripHits.add(new StripHit(sensor, strip, time, hit)); - } - return stripHits; - } - /** - * Adds a random Gaussian noise signature to the specified signal - * buffer based on the sensor and channel parameters. - * @param sensor - The sensor on which the signal buffer occurs. - * @param channel - The channel on which the signal buffer - * occurs. - * @param signal - The signal buffer. This must be an array of - * size six. - */ - private void addNoise(SiSensor sensor, int channel, double[] signal) { - for(int sampleN = 0; sampleN < 6; sampleN++) { - signal[sampleN] += RandomGaussian.getGaussian(0, ((HpsSiSensor) sensor).getNoise(channel, sampleN)); - } - } - - /** - * Performs each of the three readout cuts, if they are enabled. - * This is the equivalent of calling, as appropriate, the methods - * {@link - * org.hps.readout.svt.SvtDigiWithPulserNoSpacingReadoutDriver#samplesAboveThreshold(RawTrackerHit) - * samplesAboveThreshold(RawTrackerHit)}, {@link - * org.hps.readout.svt.SvtDigiWithPulserNoSpacingReadoutDriver#pileupCut(RawTrackerHit) - * pileupCut(RawTrackerHit)}, and {@link - * org.hps.readout.svt.SvtDigiWithPulserNoSpacingReadoutDriver#badChannelCut(RawTrackerHit) - * badChannelCut(RawTrackerHit)}. - * @param hit - The analog hit to test. - * @return Returns true if all enabled cuts are - * passed, and false otherwise. 
- */ - private boolean readoutCuts(RawTrackerHit hit) { - // Perform each enabled cut. - if(enableThresholdCut && !samplesAboveThreshold(hit)) { - return false; - } - if(enablePileupCut && !pileupCut(hit)) { - return false; - } - if(dropBadChannels && !badChannelCut(hit)) { - return false; - } - - // If all enabled cuts are passed, return true. - return true; - } - - /** - * Checks whether an analog hit occurred on a "bad" channel. - * @param hit - The hit to be checked. - * @return Returns true if the hit did not - * occur on a bad channel, and false if it did. - */ - private boolean badChannelCut(RawTrackerHit hit) { - HpsSiSensor sensor = (HpsSiSensor) hit.getDetectorElement(); - int channel = hit.getIdentifierFieldValue("strip"); - return !sensor.isBadChannel(channel); - } - - /** - * Attempts to eliminate samples where the pulse starts before - * the sample array. This is done by requiring the second, third, - * and fourth samples of the array to be increasing in value with - * index. - * @param hit - The hit to check. - * @return Returns true if the no pile-up condition - * is met and false if it is not. - */ - private boolean pileupCut(RawTrackerHit hit) { - short[] samples = hit.getADCValues(); - return (samples[2] > samples[1] || samples[3] > samples[2]); - } - - /** - * Attempts to eliminate false hits generated due to noise by - * requiring that a programmable number of samples exceed a - * similarly programmable noise threshold. - * @param hit - The hit to be checked. - * @return Returns true if the noise threshold count - * cut is met and false if it is not. - */ - private boolean samplesAboveThreshold(RawTrackerHit hit) { - // Get the channel and sensor information for the hit. - int channel = hit.getIdentifierFieldValue("strip"); - HpsSiSensor sensor = (HpsSiSensor) hit.getDetectorElement(); - - // Track the noise and pedestal for each sample. 
- double noise; - double pedestal; - - // Iterate over the samples and count how many are above the - // noise threshold. - int count = 0; - short[] samples = hit.getADCValues(); - for(int sampleN = 0; sampleN < samples.length; sampleN++) { - pedestal = sensor.getPedestal(channel, sampleN); - noise = sensor.getNoise(channel, sampleN); - if(samples[sampleN] - pedestal > noise * noiseThreshold) { - count++; - } - } - - // The cut is passed if enough samples are above the noise - // threshold to pass the minimum count threshold. - return count >= samplesAboveThreshold; - } - - @Override - protected Collection> getOnTriggerData(double triggerTime) { - // No pile-up events are output on an event-by-event basis, - // and as such, do not output anything at this stage. - if(noPileup) { return null; } - // Create a list to hold the analog data - List hits = new ArrayList(); - List truthHits = new ArrayList(); - List trueHitRelations = new ArrayList(); - // Calculate time of first sample - // double firstSample = Math.floor(((triggerTime + 256) - readoutLatency - readoutOffset) / HPSSVTConstants.SAMPLING_INTERVAL) - // * HPSSVTConstants.SAMPLING_INTERVAL + readoutOffset; - - double firstSample = Math.floor(((triggerTime + 0) - readoutLatency - readoutOffset) / HPSSVTConstants.SAMPLING_INTERVAL) - * HPSSVTConstants.SAMPLING_INTERVAL + readoutOffset; - - if(debug_){ - System.out.println(this.getClass().getName()+":: trigger time = "+triggerTime); - System.out.println(this.getClass().getName()+":: svt first sample time for trigger = "+firstSample); - } - List processedHits = new ArrayList(); - - for(SiSensor sensor : sensors) { - // Get the hit queues for the current sensor. - PriorityQueue[] hitQueues = hitMap.get(sensor); - PriorityQueue[] pulserHitQueues = pulserHitMap.get(sensor); - - // Iterate over the hit queue channels. 
- for(int channel = 0; channel < hitQueues.length; channel++) { - // Unless noise should be added, there is nothing to - // process on an empty hit queue. Skip it. - if(!addNoise && (hitQueues[channel] == null || hitQueues[channel].isEmpty()) && (pulserHitQueues[channel] == null || pulserHitQueues[channel].isEmpty())){ - continue; - } - - // Create a buffer to hold the extracted response for - // the channel. - double[] signal = new double[6]; - - //do the pulser hit first...if there is a pulser hit, don't add pedestal or noise to mc hit - boolean hasPulserHit=false; // flag if this channel has a pulser hit - if(pulserHitQueues[channel] != null){ - StripHit ph=pulserHitQueues[channel].poll(); - RawTrackerHit rth=ph.getRawTrackerHit(); - hasPulserHit=true; - short[] samples =rth.getADCValues(); - for(int sampleN = 0; sampleN < 6; sampleN++) { - signal[sampleN] = samples[sampleN]; - } - } - - if(!hasPulserHit){ - // Create a buffer to hold the extracted signal for - // the channel. Populate it with the appropriate - // pedestal values. - - for(int sampleN = 0; sampleN < 6; sampleN++) { - signal[sampleN] = ((HpsSiSensor) sensor).getPedestal(channel, sampleN); - } - - // If noise should be added, do so. - if(addNoise) { - addNoise(sensor, channel, signal); - } - } - - // Create a list to store truth SVT hits. - List simHits = new ArrayList(); - - // If there is data in the mc hit queues, process it. - if(hitQueues[channel] != null) { - if(debug_)System.out.println(this.getClass().getName()+":: data in channel = "+channel); - for(StripHit hit : hitQueues[channel]) { - processedHits.add(hit); - - // Track the noise and contribution to the - // signal from the current hit. - double meanNoise = 0; - double totalContrib = 0; - - // Emulate the pulse response for the hit - // across all size samples. 
- StringBuffer signalBuffer = new StringBuffer("\t\t\t\tSample Pulse :: ["); - for(int sampleN = 0; sampleN < 6; sampleN++) { - double sampleTime = firstSample + sampleN * HPSSVTConstants.SAMPLING_INTERVAL; - shape.setParameters(channel, (HpsSiSensor) sensor); - double signalAtTime = hit.amplitude * shape.getAmplitudePeakNorm(sampleTime - hit.time); - if(debug_){ - System.out.println(this.getClass().getName()+":: making pulse: sample time = " - +sampleTime+"; hit time = "+hit.time); - System.out.println(this.getClass().getName()+":: signal @ time() = "+signalAtTime); - } - totalContrib += signalAtTime; - signal[sampleN] += signalAtTime; - meanNoise += ((HpsSiSensor) sensor).getNoise(channel, sampleN); - - signalBuffer.append(signalAtTime + " (" + sampleTime + ")"); - if(sampleN != 5) { - signalBuffer.append(" "); - } - } - signalBuffer.append("]"); - - // TODO: Move this to the noise comparison below. - meanNoise /= 6; - - // Calculate the average noise across all - // samples and compare it to the contribution - // from the hit. If it exceeds a the noise - // threshold, store it as a truth hit. - //meanNoise /= 6; - if(totalContrib > 4.0 * meanNoise) { - simHits.addAll(hit.simHits); - } - } - } - - // Convert the samples into a short array, - short[] samples = new short[6]; - for(int sampleN = 0; sampleN < 6; sampleN++) { - samples[sampleN] = (short) Math.round(signal[sampleN]); - } - - // Get the proper channel ID. - long channel_id = ((HpsSiSensor) sensor).makeChannelID(channel); - - // Create a new tracker hit. - RawTrackerHit hit = new BaseRawTrackerHit(0, channel_id, samples, simHits, sensor); - // Only tracker hits that pass the readout cuts may - // be passed through to readout. - if(readoutCuts(hit)) { - // Add the hit to the readout hits collection. - hits.add(hit); - // Associate the truth hits with the raw hit and - // add them to the truth hits collection. 
- for(SimTrackerHit simHit : hit.getSimTrackerHits()) { - LCRelation hitRelation = new BaseLCRelation(hit, simHit); - trueHitRelations.add(hitRelation); - truthHits.add(simHit); - } - } - } - } - - // Create the collection data objects for output to the - // readout event. - TriggeredLCIOData hitCollection = new TriggeredLCIOData(trackerHitCollectionParams); - hitCollection.getData().addAll(hits); - TriggeredLCIOData truthHitCollection = new TriggeredLCIOData(truthHitsCollectionParams); - truthHitCollection.getData().addAll(truthHits); - TriggeredLCIOData truthRelationCollection = new TriggeredLCIOData(truthRelationsCollectionParams); - truthRelationCollection.getData().addAll(trueHitRelations); - - // MC particles need to be extracted from the truth hits - // and included in the readout data to ensure that the - // full truth chain is available. - Set truthParticles = new java.util.HashSet(); - for(SimTrackerHit simHit : truthHits) { - ReadoutDataManager.addParticleParents(simHit.getMCParticle(), truthParticles); - } - - // Create the truth MC particle collection. - LCIOCollectionFactory.setCollectionName("MCParticle"); - LCIOCollectionFactory.setProductionDriver(this); - LCIOCollection truthParticleCollection = LCIOCollectionFactory.produceLCIOCollection(MCParticle.class); - TriggeredLCIOData truthParticleData = new TriggeredLCIOData(truthParticleCollection); - truthParticleData.getData().addAll(truthParticles); - - // A trigger timestamp needs to be produced as well. - ReadoutTimestamp timestamp = new ReadoutTimestamp(ReadoutTimestamp.SYSTEM_TRACKER, firstSample); - LCIOCollectionFactory.setCollectionName(ReadoutTimestamp.collectionName); - LCIOCollection timestampCollection = LCIOCollectionFactory.produceLCIOCollection(ReadoutTimestamp.class); - TriggeredLCIOData timestampData = new TriggeredLCIOData(timestampCollection); - timestampData.getData().add(timestamp); - - // Store them in a single collection. 
- Collection> eventOutput = new ArrayList>(5); - eventOutput.add(hitCollection); - eventOutput.add(truthParticleData); - eventOutput.add(truthHitCollection); - eventOutput.add(truthRelationCollection); - eventOutput.add(timestampData); - - // Return the event output. - return eventOutput; - } - - /** - * Class StripHit is responsible for storing several - * parameters defining a simulated hit object. - */ - private class StripHit implements Comparable { - SiSensor sensor; - int channel; - double amplitude; - double time; - Set simHits; - RawTrackerHit pulserHit; - boolean isPulser=false; - - public StripHit(SiSensor sensor, int channel, double amplitude, double time, Set simHits) { - this.sensor = sensor; - this.channel = channel; - this.amplitude = amplitude; - this.time = time; - this.simHits = simHits; - this.isPulser=false; - } - - public StripHit(SiSensor sensor, int channel, double time, RawTrackerHit pulserHit){ - this.sensor = sensor; - this.channel = channel; - this.pulserHit=pulserHit; - this.time=time; - this.isPulser=false; - } - - public boolean getIsPulser(){return this.isPulser;} - public RawTrackerHit getRawTrackerHit(){return this.pulserHit;} - @Override - public int compareTo(Object o) { - double deltaT = time - ((StripHit) o).time; - if(deltaT > 0) { - return 1; - } else if(deltaT < 0) { - return -1; - } else { - return 0; - } - } - } - - @Override - protected double getTimeDisplacement() { - return 0; - } - - @Override - protected double getTimeNeededForLocalOutput() { - // TODO: Probably should have some defined value - buffer seems to be filled enough from the ecal delay alone, though. 
- return 100; - } - -} diff --git a/ecal-readout-sim/src/main/java/org/hps/readout/RawConverterReadoutDriver.java b/ecal-readout-sim/src/main/java/org/hps/readout/RawConverterReadoutDriver.java index 71a2c36b2..26f4589ef 100755 --- a/ecal-readout-sim/src/main/java/org/hps/readout/RawConverterReadoutDriver.java +++ b/ecal-readout-sim/src/main/java/org/hps/readout/RawConverterReadoutDriver.java @@ -48,7 +48,9 @@ public abstract class RawConverterReadoutDriver extends ReadoutDriver { * conditions database should be skipped when producing hits. */ protected boolean skipBadChannels = false; - + + private double checkAheadTime = 4.0; + protected RawConverterReadoutDriver(String defaultInputCollectionName, String defaultOutputCollectionName) { inputCollectionName = defaultInputCollectionName; outputCollectionName = defaultOutputCollectionName; @@ -71,14 +73,20 @@ public final void detectorChanged(Detector detector) { public final void process(EventHeader event) { // Check the data management driver to determine whether the // input collection is available or not. - if(!ReadoutDataManager.checkCollectionStatus(inputCollectionName, localTime + 4.0)) { + if(doNoSpacing) + localTime=ReadoutDataManager.getCurrentTime(); // just overwrite local time on every event + if(!doNoSpacing&&!ReadoutDataManager.checkCollectionStatus(inputCollectionName, localTime + checkAheadTime)) { + if(debug)System.out.println("Skipping RawConverterReadout because collection = "+inputCollectionName+" doesn't exist at "+(localTime+ checkAheadTime)); return; } // Get all of the raw hits in the current clock-cycle. - Collection rawHits = ReadoutDataManager.getData(localTime, localTime + 4.0, inputCollectionName, RawCalorimeterHit.class); + Collection rawHits = ReadoutDataManager.getData(localTime, localTime + checkAheadTime, inputCollectionName, RawCalorimeterHit.class); - // Increment the local time. 
+ + if(debug)System.out.println(this.getClass().getName()+":: collection = "+inputCollectionName+" has "+rawHits.size()+" found between time = "+localTime+" and "+(localTime+checkAheadTime)); + + // Increment the local time. localTime += 4.0; // Pass the raw hits to the raw converter to obtain proper @@ -96,11 +104,11 @@ public final void process(EventHeader event) { if(skipBadChannels && isBadChannel(newHit.getCellID())) { continue; } - + if(debug)System.out.println(this.getClass().getName()+":: made newHit with time = "+newHit.getTime()); // Add the new hit. newHits.add(newHit); } - + if(debug)System.out.println(this.getClass().getName()+":: outputting collection = "+outputCollectionName+" with size = "+newHits.size()); // Add the calorimeter hit collection to the data manager. ReadoutDataManager.addData(outputCollectionName, newHits, CalorimeterHit.class); } @@ -246,4 +254,12 @@ public void setSkipBadChannels(boolean state) { public void setReadoutWindow(int window) { getConverter().setWindowSamples(window); } + /** + * Sets the amount of time (+ ns) to check for possible + * seed clusters. + * @param value - time in ns + */ + public void setCheckAheadTime(double value) { + checkAheadTime = value; + } } diff --git a/ecal-readout-sim/src/main/java/org/hps/readout/ecal/updated/GTPClusterReadoutDriver.java b/ecal-readout-sim/src/main/java/org/hps/readout/ecal/updated/GTPClusterReadoutDriver.java index 84ef87c25..1a86cf155 100755 --- a/ecal-readout-sim/src/main/java/org/hps/readout/ecal/updated/GTPClusterReadoutDriver.java +++ b/ecal-readout-sim/src/main/java/org/hps/readout/ecal/updated/GTPClusterReadoutDriver.java @@ -99,6 +99,15 @@ public class GTPClusterReadoutDriver extends ReadoutDriver { * This is calculated automatically. */ private double localTimeDisplacement = 0; + + /** + * The amount of time (ns) to check ahead/behind + * for ecal clusters. 
+ * This can be large for no-spacing running (like 192) + * but should be 4.0 for spaced running + */ + + private double checkAheadTime = 4.0; // ============================================================== // ==== Driver Parameters ======================================= @@ -185,19 +194,23 @@ public void detectorChanged(Detector etector) { @Override public void process(EventHeader event) { - // Check the data management driver to determine whether the + + if(doNoSpacing) + localTime=ReadoutDataManager.getCurrentTime(); // just overwrite local time on every event + // Check the data management driver to determine whether the // input collection is available or not. - if(!ReadoutDataManager.checkCollectionStatus(inputCollectionName, localTime + temporalWindow + 4.0)) { - return; + if(!doNoSpacing&&!ReadoutDataManager.checkCollectionStatus(inputCollectionName, localTime + temporalWindow + checkAheadTime)) { + if(debug)System.out.println("Skipping GTP Readout with because collection doesn't exist at "+(localTime+temporalWindow + checkAheadTime)); + return; } // Get the hits that occur during the present clock-cycle, as // well as the hits that occur in the verification window // both before and after the current clock-cycle. // TODO: Simplify this? 
- Collection seedCandidates = ReadoutDataManager.getData(localTime, localTime + 4.0, inputCollectionName, CalorimeterHit.class); + Collection seedCandidates = ReadoutDataManager.getData(localTime, localTime + checkAheadTime, inputCollectionName, CalorimeterHit.class); Collection foreHits = ReadoutDataManager.getData(localTime - temporalWindow, localTime, inputCollectionName, CalorimeterHit.class); - Collection postHits = ReadoutDataManager.getData(localTime + 4.0, localTime + temporalWindow + 4.0, inputCollectionName, CalorimeterHit.class); + Collection postHits = ReadoutDataManager.getData(localTime + checkAheadTime, localTime + temporalWindow + checkAheadTime, inputCollectionName, CalorimeterHit.class); // Increment the local time. localTime += 4.0; @@ -208,16 +221,22 @@ public void process(EventHeader event) { allHits.addAll(foreHits); allHits.addAll(seedCandidates); allHits.addAll(postHits); - + if(debug){ + System.out.println(this.getClass().getName()+":: "+inputCollectionName+":: local time = "+localTime+ + " temporalWindow = "+temporalWindow+" checkAheadTime = "+checkAheadTime); + System.out.println(this.getClass().getName()+":: "+inputCollectionName+":: current time = "+ReadoutDataManager.getCurrentTime()+" number of seeds = "+seedCandidates.size()+"; all hits = "+allHits.size()); + } // Store newly created clusters. List gtpClusters = new ArrayList(); // Iterate over all seed hit candidates. seedLoop: for(CalorimeterHit seedCandidate : seedCandidates) { + if(debug)System.out.println(this.getClass().getName()+":: looping through seeds: seed energy = "+seedCandidate.getRawEnergy()); // A seed candidate must meet a minimum energy cut to be // considered for clustering. if(seedCandidate.getRawEnergy() < seedEnergyThreshold) { + if(debug)System.out.println(this.getClass().getName()+":: failed seed energy: threshold = "+seedEnergyThreshold); continue seedLoop; } @@ -254,7 +273,8 @@ public void process(EventHeader event) { // cluster should be formed. 
gtpClusters.add(createBasicCluster(seedCandidate, clusterHits)); } - + + if(debug)System.out.println(this.getClass().getName()+":: adding gtpClusters to data manager size = "+gtpClusters.size()); // Pass the clusters to the data management driver. ReadoutDataManager.addData(outputCollectionName, gtpClusters, Cluster.class); } @@ -336,7 +356,10 @@ protected Collection> getOnTriggerData(double triggerTime) @Override protected double getTimeDisplacement() { - return localTimeDisplacement; + if(doNoSpacing) + return 0; + else + return localTimeDisplacement; } @Override @@ -384,5 +407,13 @@ public void setClusterWindow(int value) { */ public void setSeedEnergyThreshold(double value) { seedEnergyThreshold = value; - } + } + /** + * Sets the amount of time (+/-ns) to check for possible + * seed clusters. + * @param value - time in ns + */ + public void setCheckAheadTime(double value) { + checkAheadTime = value; + } } diff --git a/ecal-readout-sim/src/main/java/org/hps/readout/hodoscope/HodoscopePatternReadoutDriver.java b/ecal-readout-sim/src/main/java/org/hps/readout/hodoscope/HodoscopePatternReadoutDriver.java index cf74aaf81..9525a47a8 100644 --- a/ecal-readout-sim/src/main/java/org/hps/readout/hodoscope/HodoscopePatternReadoutDriver.java +++ b/ecal-readout-sim/src/main/java/org/hps/readout/hodoscope/HodoscopePatternReadoutDriver.java @@ -145,11 +145,14 @@ public void actionPerformed(ActionEvent e) { } @Override - public void process(EventHeader event) { - + public void process(EventHeader event) { + if(doNoSpacing) + localTime=ReadoutDataManager.getCurrentTime(); // just overwrite local time on every event + // Check the data management driver to determine whether the // input collection is available or not. 
- if (!ReadoutDataManager.checkCollectionStatus(inputCollectionName, localTime + localTimeDisplacement)) { + if (!doNoSpacing && !ReadoutDataManager.checkCollectionStatus(inputCollectionName, localTime + localTimeDisplacement)) { + if(debug)System.out.println(this.getClass().getName()+":: "+inputCollectionName+" doesn't exist at time = "+(localTime + localTimeDisplacement)); return; } @@ -162,7 +165,7 @@ public void process(EventHeader event) { Collection fadcHits = ReadoutDataManager.getData( localTime - (persistentTime - timeEarlierThanEcal), localTime + timeEarlierThanEcal + 4.0, inputCollectionName, CalorimeterHit.class); - + if(debug)System.out.println(this.getClass().getName()+":: number of fadcHits found = "+fadcHits.size()); // Increment the local time. localTime += 4.0; @@ -279,6 +282,7 @@ public void process(EventHeader event) { } // At leaset there is a hodo tilt/cluster hit in any layer, then the pattern list is added into data manager + if(flag == true && debug) if(debug)System.out.println(this.getClass().getName()+":: outputting "+outputCollectionName+" with size = "+hodoPatterns.size()); if(flag == true) ReadoutDataManager.addData(outputCollectionName, hodoPatterns, HodoscopePattern.class); } @@ -345,7 +349,10 @@ private void populateChannelCollections() { @Override protected double getTimeDisplacement() { - return localTimeDisplacement; + if(doNoSpacing) + return 0; + else + return localTimeDisplacement; } @Override @@ -421,4 +428,5 @@ public void setTimeEarlierThanEcal(double timeEarlierThanEcal) { public void setGainFactor(double gainFactor) { this.gainFactor = gainFactor; } + } diff --git a/ecal-readout-sim/src/main/java/org/hps/readout/trigger2019/SinglesTrigger2019ReadoutDriver.java b/ecal-readout-sim/src/main/java/org/hps/readout/trigger2019/SinglesTrigger2019ReadoutDriver.java index ac3c6d1fa..c4e7420bd 100644 --- a/ecal-readout-sim/src/main/java/org/hps/readout/trigger2019/SinglesTrigger2019ReadoutDriver.java +++ 
b/ecal-readout-sim/src/main/java/org/hps/readout/trigger2019/SinglesTrigger2019ReadoutDriver.java @@ -139,20 +139,40 @@ public void detectorChanged(Detector detector) { @Override public void process(EventHeader event) { // Check that clusters are available for the trigger. + // System.out.println(this.getClass().getName()+":: starting trigger determination"); Collection clusters = null; Collection hodoPatterns = null; ArrayList hodoPatternList = null; - if(ReadoutDataManager.checkCollectionStatus(inputCollectionNameEcal, localTime) && ReadoutDataManager.checkCollectionStatus(inputCollectionNameHodo, localTime)) { + if(doNoSpacing) + localTime=ReadoutDataManager.getCurrentTime(); // just overwrite local time on every event + + if(ReadoutDataManager.checkCollectionStatus(inputCollectionNameEcal, localTime) && ReadoutDataManager.checkCollectionStatus(inputCollectionNameHodo, localTime)) { + if(debug) System.out.println(this.getClass().getName()+":: checkCollectionStatus worked. Getting collection in time window = ["+localTime+","+(localTime+4.0)+"]"); clusters = ReadoutDataManager.getData(localTime, localTime + 4.0, inputCollectionNameEcal, Cluster.class); hodoPatterns = ReadoutDataManager.getData(localTime, localTime + 4.0, inputCollectionNameHodo, HodoscopePattern.class); + if(debug) System.out.println(this.getClass().getName()+":: checkCollectionStatus worked Ecal size = "+clusters.size()+" Hodo size = "+ hodoPatterns.size()); localTime += 4.0; - - if(clusters.size() == 0 || hodoPatterns.size() == 0) return; - + //this is backwards of what I wanted, but whatever... + // if(doNoSpacing&&(clusters.size() == 0 || hodoPatterns.size() == 0)) return; + // if(!doNoSpacing&&(clusters.size() == 0 && hodoPatterns.size() == 0)) return; + // if(doNoSpacing&&(clusters.size() == 0 || hodoPatterns.size() == 0)) return; + + //just quit if 0 clusters. 
+ // if(clusters.size() == 0) + // return; + /* + * I feel like this should be "and" as one of + * the triggers doesn't require hodo, right? + */ + //this is the cut that's in master + if(clusters.size() == 0 || hodoPatterns.size() == 0) return; hodoPatternList = new ArrayList<>(hodoPatterns); - } else { return; } + } else { + if(debug)System.out.println(this.getClass().getName()+":: checkCollectionStatus did not find one of Ecal or Hodo at time = "+localTime); + return; + } // Track whether or not a trigger was seen. boolean triggered = false; @@ -160,7 +180,10 @@ public void process(EventHeader event) { // There is no need to perform the trigger cuts if the // trigger is in dead time, as no trigger may be issued // regardless of the outcome. - if(isInDeadTime()) { return; } + if(isInDeadTime()) { + if(debug)System.out.println(this.getClass().getName()+":: trigger is in dead-time!!!"); + return; + } // Record top/bot status for singles triggers List topBot = new ArrayList(); @@ -172,7 +195,10 @@ public void process(EventHeader event) { // not available during readout, so crystal indices must // be obtained directly from the calorimeter geometry. java.awt.Point ixy = ecal.getCellIndices(cluster.getCalorimeterHits().get(0).getCellID()); - + System.out.println(this.getClass().getName()+ + ":: looping over clusters; number of hits = "+TriggerModule2019.getClusterHitCount(cluster) + +" seed energy value = " + TriggerModule2019.getValueClusterSeedEnergy(cluster) + +" total energy of cluster = "+ TriggerModule2019.getValueClusterTotalEnergy(cluster)); // Populate the uncut plots. clusterSeedEnergy[NO_CUTS].fill(TriggerModule2019.getValueClusterSeedEnergy(cluster)); clusterTotalEnergy[NO_CUTS].fill(TriggerModule2019.getValueClusterTotalEnergy(cluster)); @@ -181,17 +207,22 @@ public void process(EventHeader event) { // Perform the hit count cut. 
if(triggerModule.getCutEn(TriggerModule2019.CLUSTER_HIT_COUNT_LOW_EN) && !triggerModule.clusterHitCountCut(cluster)) { + if(debug)System.out.println(this.getClass().getName()+":: did not satisfy cluster hit count (low)"); continue; } // Perform the cluster energy cut. if(triggerModule.getCutEn(TriggerModule2019.CLUSTER_TOTAL_ENERGY_LOW_EN) && !triggerModule.clusterTotalEnergyCutLow(cluster)) { + if(debug)System.out.println(this.getClass().getName()+":: did not satisfy cluster energy cut (low)"); continue; } if(triggerModule.getCutEn(TriggerModule2019.CLUSTER_TOTAL_ENERGY_HIGH_EN) && !triggerModule.clusterTotalEnergyCutHigh(cluster)) { + if(debug)System.out.println(this.getClass().getName()+":: did not satisfy cluster energy count (high)"); continue; } + + System.out.println(this.getClass().getName()+":: made it past basic cluster cuts"); // In the setup calorimeter geometry, range of X coordinates is [-23, -1] and [1, 23]. // The hardware uses cluster X coordinates [-22,0] and [1,23]. @@ -203,19 +234,30 @@ public void process(EventHeader event) { // XMin is at least 0. if(!triggerModule.getCutEn(TriggerModule2019.SINGLES_MOLLERMODE_EN)) { if(triggerModule.getCutEn(TriggerModule2019.CLUSTER_XMIN_EN) && !triggerModule.clusterXMinCut(clusterX)) { + if(debug)System.out.println(this.getClass().getName()+":: did not satisfy cluster x cut (low)"); continue; } - + System.out.println(this.getClass().getName()+":: made it past xMin cut "); // XMin cut has been applied. 
if(triggerModule.getCutEn(TriggerModule2019.CLUSTER_PDE_EN) && !triggerModule.clusterPDECut(cluster, clusterX)) { + if(debug)System.out.println(this.getClass().getName()+":: did not satisfy cluster PDE cut"); continue; - } - } - - if(triggerModule.getCutEn(TriggerModule2019.SINGLES_L1L2ECAL_MATCHING_EN) && !triggerModule.geometryMatchingCut(clusterX, ixy.y, hodoPatternList)) { - continue; + } + System.out.println(this.getClass().getName()+":: made it past PDE cut "); + } + // if(triggerModule.getCutEn(TriggerModule2019.SINGLES_L1L2ECAL_MATCHING_EN) && !triggerModule.geometryMatchingCut(clusterX, ixy.y, hodoPatternList)) { + //put in check for hodoscope pattern collection size here + if(triggerModule.getCutEn(TriggerModule2019.SINGLES_L1L2ECAL_MATCHING_EN) && hodoPatterns.size()>0){ + if(!triggerModule.geometryMatchingCut(clusterX, ixy.y, hodoPatternList)) { + if(debug)System.out.println(this.getClass().getName()+":: did not satisfy cluster-hodo matching cut"); + continue; + } + System.out.println(this.getClass().getName()+":: made it past cluster-hodo matching cut "); + + } + if(debug)System.out.println(this.getClass().getName()+":: made it through all non-moller cuts"); //For 2021 update, Moller triggers if(triggerModule.getCutEn(TriggerModule2019.SINGLES_MOLLERMODE_EN)) { if(triggerModule.getCutEn(TriggerModule2019.SINGLES_XYMINMAX_EN) && !triggerModule.clusterXMinCut(clusterX)) { @@ -236,7 +278,9 @@ public void process(EventHeader event) { } // Note that a trigger occurred. 
triggered = true; - + if(debug) + if(debug)System.out.println(this.getClass().getName()+":: found a trigger!"); + if(ixy.y > 0) topBot.add(TOP); else topBot.add(BOT); @@ -248,6 +292,7 @@ public void process(EventHeader event) { } if(triggered) { + if(debug)System.out.println(this.getClass().getName()+":: sending trigger!!!"); boolean topStat = false; boolean botStat = false; if(topBot.contains(TOP)) topStat = true; diff --git a/record-util/src/main/java/org/hps/readout/ReadoutDataManager.java b/record-util/src/main/java/org/hps/readout/ReadoutDataManager.java index 858405a1d..20b36619c 100755 --- a/record-util/src/main/java/org/hps/readout/ReadoutDataManager.java +++ b/record-util/src/main/java/org/hps/readout/ReadoutDataManager.java @@ -127,6 +127,9 @@ public class ReadoutDataManager extends Driver { private static final String nl = String.format("%n"); private static final Logger logger = Logger.getLogger(ReadoutDataManager.class.getSimpleName()); + + private static boolean debug=false; + @Override public void startOfData() { @@ -225,8 +228,13 @@ public void endOfData() { public void process(EventHeader event) { // Check the trigger queue. if(!triggerQueue.isEmpty()) { - // Check the earliest possible trigger write time. + if(debug)System.out.println(this.getClass().getName()+" found a trigger @ "+triggerQueue.peek().getTriggerTime()+"; current time is "+ getCurrentTime()); + // Check the earliest possible trigger write time. boolean isWritable = getCurrentTime() >= triggerQueue.peek().getTriggerTime() + bufferTotal; + if(debug && !isWritable) + System.out.println(this.getClass().getName()+":: can't write this trigger yet because "+getCurrentTime()+" < "+(triggerQueue.peek().getTriggerTime() + bufferTotal)); + + // If all collections are available to be written, the // event should be output. 
if(isWritable) { @@ -250,8 +258,16 @@ public void process(EventHeader event) { if(!triggerQueue.isEmpty()) nextTrigger = triggerQueue.peek(); } } - triggers++; + double roughTimeOfEvent=getCurrentTime(); + if(effectiveBunches==1){ //we are doing spaced simulation + //342ns is the typical time to do readout/triggering...subtract this off to compare to spaced + if(debug) + System.out.println(getClass().getName()+":: subtracting 342ns to spaced event to get rough time"); + roughTimeOfEvent=roughTimeOfEvent-342.0; + } + if(debug) + System.out.println(getClass().getName()+":: found trigger number = "+triggers+" at current time = "+roughTimeOfEvent); // Make a new LCSim event. int triggerEventNumber = event.getEventNumber() - ((int) Math.floor((getCurrentTime() - trigger.getTriggerTime()) / 2.0)); @@ -426,7 +442,9 @@ else if(topBot.equals(TriggerDriver.TOPBOT)){ for(TriggeredLCIOData triggerData : triggeredDataMap.values()) { storeCollection(triggerData, lcsimEvent); } - + if(debug) + System.out.println(getClass().getName()+":: writing event!!!"); + // Write the event to the output file. try { outputWriter.write(lcsimEvent); } catch(IOException e) { @@ -443,8 +461,11 @@ else if(topBot.equals(TriggerDriver.TOPBOT)){ data.getData().removeFirst(); } } - - // Increment the current time. + if(debug){ + System.out.println("ReadoutDataManager:: end of event with current time = "+currentTime); + System.out.println("##################### END #################################"); + } + // Increment the current time. 
currentTime += effectiveBunches*BEAM_BUNCH_SIZE; } @@ -480,7 +501,17 @@ public static final void addData(String collectionName, double dataTime, Col throw new IllegalArgumentException("Error: Saw data type \"" + dataType.getSimpleName() + "\" but expected data type \"" + collectionData.getCollectionParameters().getObjectType().getSimpleName() + "\" instead."); } - + //mg debug + /* + if(debug && Double.isNaN(dataTime)){ + System.out.println("ReadoutDataDriver:: addData no time given. "+ dataType.getName()+": currentTime = "+currentTime+"; global displacement = "+(collectionData.getCollectionParameters().getGlobalTimeDisplacement())); + System.out.println("ReadoutDataDriver:: addData setting time to = "+(currentTime - collectionData.getCollectionParameters().getGlobalTimeDisplacement())); + } else { + System.out.println("ReadoutDataDriver:: addData time provided; setting time to = "+dataTime); + } + */ + // + // If the data is empty, then there is no need to add it to // the buffer. if(!data.isEmpty()) { @@ -998,15 +1029,19 @@ private static final List getDataList(double startTime, double endTime, S // Throw an alert if the earliest requested time precedes the // earliest buffered time, and similarly for the latest time. LinkedList> dataLists = collectionData.getData(); - + //System.out.println("ReadoutDataManager::getDataList number in dataLists of objectType: "+objectType.getName()+" = "+dataLists.size()); // Iterate through the data and collect all entries that have // an associated truth time within the given time range. The // lower bound is inclusive, the upper bound is exclusive. List outputList = new ArrayList(); for(TimedList dataList : dataLists) { + // if(debug) + // System.out.println("ReadoutDataManager::getDataList dataList found at time = "+dataList.getTime()+" looking in time window ["+startTime+"--"+endTime+"]"); if(dataList.getTime() >= startTime && dataList.getTime() < endTime) { // Add the items from the list to the output list. 
for(Object o : dataList) { + //if(debug) + // System.out.println("ReadoutDataManager:: dataList of type "+o.getClass().getName()+" found in the time window ["+startTime+"--"+endTime+"]"); if(objectType.isAssignableFrom(o.getClass())) { outputList.add(objectType.cast(o)); } else { @@ -1215,5 +1250,9 @@ public static final void setEffectiveBunches(int value){ public static final void setZeroBuffer(boolean zero){ zeroBuffer=zero; } + + public static final void setDebug(boolean value){ + debug=value; + } } diff --git a/record-util/src/main/java/org/hps/readout/ReadoutDriver.java b/record-util/src/main/java/org/hps/readout/ReadoutDriver.java index e781fc0df..ff7fa6cba 100755 --- a/record-util/src/main/java/org/hps/readout/ReadoutDriver.java +++ b/record-util/src/main/java/org/hps/readout/ReadoutDriver.java @@ -120,7 +120,13 @@ public abstract class ReadoutDriver extends Driver { * this collection data should be written. */ private double readoutWindowBefore = Double.NaN; - + /** + * Boolean to choose no-spacing readout mode. + * Should be false for MC generated beam background + */ + public boolean doNoSpacing = false; + + public boolean debug=false; /** * Instantiates the readout driver. 
*/ @@ -272,4 +278,16 @@ public void setReadoutWindowAfter(double value) throws UnsupportedOperationExcep public void setReadoutWindowBefore(double value) throws UnsupportedOperationException { readoutWindowBefore = value; } + /** + * Sets do-no-spacing readout mode + * used for unspaced signal events + * @param value - true/false do no spacing + */ + public void setDoNoSpacing(boolean value) { + doNoSpacing = value; + } + + public void setDebug(boolean value){ + debug=value; + } } diff --git a/steering-files/src/main/resources/org/hps/steering/readout/TestNoSpacingModifyCurrent.lcsim b/steering-files/src/main/resources/org/hps/steering/readout/TestNoSpacingModifyCurrent.lcsim new file mode 100644 index 000000000..418824cf9 --- /dev/null +++ b/steering-files/src/main/resources/org/hps/steering/readout/TestNoSpacingModifyCurrent.lcsim @@ -0,0 +1,470 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + true + + + + + EcalHits + + + 8.0 + 32.0 + false + + + + MCParticle + + + 32.0 + 32.0 + false + + + + HodoscopeHits + + + 8.0 + 32.0 + false + + + + TrackerHits + + + 8.0 + 32.0 + false + + + + + EcalReadoutHits + + + 32.0 + 32.0 + false + + + + HodoReadoutHits + + + 32.0 + 32.0 + false + + + + + SVTRawTrackerHits + + + + + + SVTRawTrackerHits + + 32.0 + 32.0 + false + + + + + + EcalHits + PulserDataEcalReadoutHits + EcalRawHits + EcalReadoutHits + EcalTruthRelations + TriggerPathTruthRelations + + true + + + 1 + + false + true + false + + 56 + + + true + + + false + + + + + + EcalRawHits + EcalCorrectedHits + + true + + + 192. + true + false + false + + + + + + true + 48 + + true + 192. 
+ true + true + + + + + HodoscopeHits + HodoscopePreprocessedHits + + true + + + + + + + HodoscopePreprocessedHits + PulserDataHodoReadoutHits + HodoscopeRawHits + HodoscopeReadoutHits + HodoscopeTruthRelations + HodoscopeTriggerPathTruthRelations + + true + + + 1 + false + true + + + 4 + + + 0.000833333 + + + false + + + false + true + 0.0001 + + + + + + HodoscopeRawHits + HodoscopeCorrectedHits + true + true + + + 1 + + 192. + true + false + true + + + + + + + + HodoscopeCorrectedHits + HodoscopePatterns + + true + + false + true + true + + + + + EcalClustersGTP + + HodoscopePatterns + + true + + + 0 + true + true + + + + + EcalClustersGTP + + HodoscopePatterns + + singles2 + + true + + + 0 + true + true + + + + + + PulserDataSVTRawTrackerHits + false + true + false + + false + 20.0 + 0.0 + + + + + 250 + true + 200 + ${outputFile}.slcio + true + + + + + diff --git a/steering-files/src/main/resources/org/hps/steering/recon/PhysicsRun2019MCRecon_KF.lcsim b/steering-files/src/main/resources/org/hps/steering/recon/PhysicsRun2019MCRecon_KF.lcsim new file mode 100644 index 000000000..8747c33ee --- /dev/null +++ b/steering-files/src/main/resources/org/hps/steering/recon/PhysicsRun2019MCRecon_KF.lcsim @@ -0,0 +1,252 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 10 + + + + + + WARNING + EcalClusters + 0.030 + -5.0 + + + EcalClusters + EcalClustersCorr + + + + HodoscopeReadoutHits + CONFIG + true + + + true + HodoscopeReadoutHits + 8 + CONFIG + true + + + + + SVTRawTrackerHits + + + + Pileup + Migrad + + false + + + + true + + true + + false + + true + + true + + true + true + false + + + + 50 + false + + + + false + 100.0 + 100.0 + 400.0 + + + + Tracks_s123_c4_e56 + HPS_s123_c4_e56_4hit.xml + false + 1000.0 + 250 + + + Tracks_s123_c5_e46 + HPS_s123_c5_e46_4hit.xml + false + 1000.0 + 250 + + + Tracks_s567_c4_e123 + HPS_s567_c4_e123.xml + false + 1000.0 + 250 + + + Tracks_s456_c3_e127 + HPS_s456_c3_e127.xml + false + 1000.0 + 250 + + + 
Tracks_s356_c7_e124 + HPS_s356_c7_e124.xml + false + 1000.0 + 250 + + + Tracks_s235_c6_e147 + HPS_s235_c6_e147.xml + false + 1000.0 + 250 + + + Tracks_s234_c5_e157 + HPS_s234_c5_e167_4hit.xml + false + 1000.0 + 250 + + + + + EcalClustersCorr + GBLTracks + GBLTracks + TrackClusterMatcherMinDistance + 0 + 0.05 + 0 + 0.02 + -7.5 + 28 + 1000 + false + 0.0 + 7.0 + 7.0 + false + false + true + + + EcalClustersCorr + KalmanFullTracks + KalmanFullTracks + TrackClusterMatcherMinDistance + UnconstrainedV0Candidates_KF + UnconstrainedV0Vertices_KF + BeamspotConstrainedV0Candidates_KF + BeamspotConstrainedV0Vertices_KF + TargetConstrainedV0Candidates_KF + TargetConstrainedV0Vertices_KF + FinalStateParticles_KF + OtherElectrons_KF + 0 + 0.05 + 0 + 0.02 + -7.5 + 28 + 1000 + false + 0.0 + 7.0 + 7.0 + false + true + true + true + + + + + true + + + KalmanFullTracks + true + false + + + GBLTracks + false + false + + + ${outputFile}.slcio + + + + + ${outputFile}.root + true + 0.0 + + KalmanFullTracks + 0.1 + 4.8 + 9999 + true + true + + + + + From 33e3358dc58774940f80fd68e6fa2653d80aa12d Mon Sep 17 00:00:00 2001 From: Matt Graham Date: Thu, 12 Dec 2024 12:21:47 -0800 Subject: [PATCH 4/8] some tweaks to the unspaced readout so that the number of presamples match spaced MC for both ecal and svt; change recon raw hit fitter driver to useTimestamps for mc; add some plots to KFOutputDriver --- .../readout/TestNoSpacingModifyCurrent.lcsim | 28 +++++++++-------- .../recon/PhysicsRun2019MCRecon_KF.lcsim | 8 +++-- .../recon/tracking/kalman/KFOutputDriver.java | 30 +++++++++++++++---- 3 files changed, 45 insertions(+), 21 deletions(-) diff --git a/steering-files/src/main/resources/org/hps/steering/readout/TestNoSpacingModifyCurrent.lcsim b/steering-files/src/main/resources/org/hps/steering/readout/TestNoSpacingModifyCurrent.lcsim index 418824cf9..5b4a7ebbb 100644 --- a/steering-files/src/main/resources/org/hps/steering/readout/TestNoSpacingModifyCurrent.lcsim +++ 
b/steering-files/src/main/resources/org/hps/steering/readout/TestNoSpacingModifyCurrent.lcsim @@ -77,7 +77,8 @@ --> EcalHits - + false + 8.0 32.0 @@ -95,7 +96,8 @@ HodoscopeHits - + false + 8.0 32.0 @@ -182,14 +184,14 @@ false true - false + true - 56 + 55 false - true + false 0.0001 @@ -364,7 +366,7 @@ --> HodoscopeRawHits HodoscopeCorrectedHits - true + false true @@ -424,7 +426,7 @@ 0 true - true + @@ -440,7 +442,7 @@ 0 true - true + @@ -453,7 +455,7 @@ false 20.0 - 0.0 + 24.0 @@ -462,7 +464,7 @@ true 200 ${outputFile}.slcio - true + false diff --git a/steering-files/src/main/resources/org/hps/steering/recon/PhysicsRun2019MCRecon_KF.lcsim b/steering-files/src/main/resources/org/hps/steering/recon/PhysicsRun2019MCRecon_KF.lcsim index 8747c33ee..563395d91 100644 --- a/steering-files/src/main/resources/org/hps/steering/recon/PhysicsRun2019MCRecon_KF.lcsim +++ b/steering-files/src/main/resources/org/hps/steering/recon/PhysicsRun2019MCRecon_KF.lcsim @@ -80,9 +80,11 @@ Pileup Migrad - false + true - + + + 114 true @@ -92,7 +94,7 @@ true - true + false true true diff --git a/tracking/src/main/java/org/hps/recon/tracking/kalman/KFOutputDriver.java b/tracking/src/main/java/org/hps/recon/tracking/kalman/KFOutputDriver.java index 81701c4a9..c8bc20d8e 100644 --- a/tracking/src/main/java/org/hps/recon/tracking/kalman/KFOutputDriver.java +++ b/tracking/src/main/java/org/hps/recon/tracking/kalman/KFOutputDriver.java @@ -81,6 +81,7 @@ public class KFOutputDriver extends Driver { private boolean b_doKFresiduals = true; private boolean b_doDetailPlots = false; private boolean b_doRawHitPlots = true; + private boolean b_doAllRawHitPlots = true; //The field map for extrapolation private FieldMap bFieldMap; @@ -271,7 +272,19 @@ public void process(EventHeader event) { TrackClusterPairs.put(track,cluster); } } - + //plot all raw hits + if(b_doAllRawHitPlots){ + for (LCRelation fittedHit : _fittedHits) { + RawTrackerHit rth=FittedRawTrackerHit.getRawTrackerHit(fittedHit); + 
HpsSiSensor sensor = (HpsSiSensor) rth.getDetectorElement(); + double t0 = FittedRawTrackerHit.getT0(fittedHit); + double amplitude = FittedRawTrackerHit.getAmp(fittedHit); + double chi2Prob = ShapeFitParameters.getChiProb(FittedRawTrackerHit.getShapeFitParameters(fittedHit)); + aidaKF.histogram1D(hitFolder+"all_raw_hit_t0_"+sensor.getName()).fill(t0); + aidaKF.histogram1D(hitFolder+"all_raw_hit_amplitude_"+sensor.getName()).fill(amplitude); + aidaKF.histogram1D(hitFolder+"all_raw_hit_chisq_"+sensor.getName()).fill(chi2Prob); + } + } int nTracks=tracks.size(); if(debug) System.out.println(this.getClass()+":: found "+nTracks + " tracks"); @@ -323,6 +336,9 @@ public void process(EventHeader event) { for (TrackerHit hit : trk.getTrackerHits()) { HpsSiSensor sensor = ((HpsSiSensor) ((RawTrackerHit) hit.getRawHits().get(0)).getDetectorElement()); if (sensor != null) { + // System.out.println(this.getClass().getName()+":: hit on layer = "+((RawTrackerHit) hit.getRawHits().get(0)).getLayer()+" has time = "+hit.getTime()); + int stripLayer = ((HpsSiSensor) ((RawTrackerHit) hit.getRawHits().get(0)).getDetectorElement()).getLayerNumber(); + // System.out.println(this.getClass().getName()+":: hit on layer = "+stripLayer+" has time = "+hit.getTime()); sensorHits.put(sensor, hit); } @@ -330,6 +346,7 @@ public void process(EventHeader event) { System.out.printf("TrackerHit null sensor %s \n", hit.toString()); } _trkTimeSigma=getTrackTime(sensorHits); + // System.out.println(this.getClass().getName()+":: track time = "+_trkTimeSigma.getFirstElement()); doBasicKFtrack(trk,sensorHits); if (b_doKFresiduals) doKFresiduals(trk, sensorHits,event); @@ -693,7 +710,8 @@ private void doBasicKFtrack(Track trk, Map sensorHits) } } } - if (b_doDetailPlots) { + + if (b_doDetailPlots) { int ibins = 15; double start= -12; double end = -5; @@ -988,7 +1006,7 @@ private void setupEoPPlots() { for (String charge : charges) { //put the trk-cluster time in trkpFolder - 
aidaKF.histogram1D(trkpFolder+"trk-cluTime"+charge+vol,100,-20,20); + aidaKF.histogram1D(trkpFolder+"trk-cluTime"+charge+vol,100,-75,75); aidaKF.histogram2D(eopFolder+"EoP_vs_trackP"+charge+vol+"_fid",200,0,6,200,0,2); aidaKF.histogram2D(eopFolder+"EoP_vs_tanLambda"+charge+vol+"_fid",200,0.01,0.08,200,0,2); @@ -1118,7 +1136,9 @@ private void setupPlots() { aidaKF.histogram1D(hitFolder+"raw_hit_amplitude_"+sensor.getName(),200, 0.0, 4000.0); aidaKF.histogram1D(hitFolder+"raw_hit_chisq_"+sensor.getName(),200, 0.0, 2.0); - + aidaKF.histogram1D(hitFolder+"all_raw_hit_t0_"+sensor.getName(),200, -100, 100.0); + aidaKF.histogram1D(hitFolder+"all_raw_hit_amplitude_"+sensor.getName(),200, 0.0, 4000.0); + aidaKF.histogram1D(hitFolder+"all_raw_hit_chisq_"+sensor.getName(),200, 0.0, 2.0); xmax = 0.0006; if(l==1){ @@ -1167,7 +1187,7 @@ private void setupPlots() { aidaKF.histogram1D(trkpFolder+"z0"+vol+charge,nbins_t,-1.3,1.3); aidaKF.histogram1D(trkpFolder+"phi"+vol+charge,nbins_t,-0.06,0.06); aidaKF.histogram1D(trkpFolder+"tanLambda"+vol+charge,nbins_t,-0.2,0.2); - aidaKF.histogram1D(trkpFolder+"trkTime"+vol+charge,nbins_t,-20,20); + aidaKF.histogram1D(trkpFolder+"trkTime"+vol+charge,nbins_t,-75,75); aidaKF.histogram1D(trkpFolder+"trkTimeSD"+vol+charge,nbins_t,0,10); aidaKF.histogram1D(trkpFolder+"p"+vol+charge,nbins_p,0.,pmax); From 558676bb0191a3ab32198144eb5084f24ebd7c26 Mon Sep 17 00:00:00 2001 From: Matt Graham Date: Fri, 13 Dec 2024 10:11:02 -0800 Subject: [PATCH 5/8] Make a steering file for unspaced MC recon --- ... 
PhysicsRun2019MCRecon_KF_NoSpacing.lcsim} | 110 ++---------------- 1 file changed, 7 insertions(+), 103 deletions(-) rename steering-files/src/main/resources/org/hps/steering/recon/{PhysicsRun2019MCRecon_KF.lcsim => PhysicsRun2019MCRecon_KF_NoSpacing.lcsim} (59%) diff --git a/steering-files/src/main/resources/org/hps/steering/recon/PhysicsRun2019MCRecon_KF.lcsim b/steering-files/src/main/resources/org/hps/steering/recon/PhysicsRun2019MCRecon_KF_NoSpacing.lcsim similarity index 59% rename from steering-files/src/main/resources/org/hps/steering/recon/PhysicsRun2019MCRecon_KF.lcsim rename to steering-files/src/main/resources/org/hps/steering/recon/PhysicsRun2019MCRecon_KF_NoSpacing.lcsim index 563395d91..77b001ee0 100644 --- a/steering-files/src/main/resources/org/hps/steering/recon/PhysicsRun2019MCRecon_KF.lcsim +++ b/steering-files/src/main/resources/org/hps/steering/recon/PhysicsRun2019MCRecon_KF_NoSpacing.lcsim @@ -17,23 +17,10 @@ - - - - - + - - @@ -83,8 +70,7 @@ true - - 114 + 0 true @@ -108,85 +94,7 @@ 50 false - - - false - 100.0 - 100.0 - 400.0 - - - - Tracks_s123_c4_e56 - HPS_s123_c4_e56_4hit.xml - false - 1000.0 - 250 - - - Tracks_s123_c5_e46 - HPS_s123_c5_e46_4hit.xml - false - 1000.0 - 250 - - - Tracks_s567_c4_e123 - HPS_s567_c4_e123.xml - false - 1000.0 - 250 - - - Tracks_s456_c3_e127 - HPS_s456_c3_e127.xml - false - 1000.0 - 250 - - - Tracks_s356_c7_e124 - HPS_s356_c7_e124.xml - false - 1000.0 - 250 - - - Tracks_s235_c6_e147 - HPS_s235_c6_e147.xml - false - 1000.0 - 250 - - - Tracks_s234_c5_e157 - HPS_s234_c5_e167_4hit.xml - false - 1000.0 - 250 - - - - - EcalClustersCorr - GBLTracks - GBLTracks - TrackClusterMatcherMinDistance - 0 - 0.05 - 0 - 0.02 - -7.5 - 28 - 1000 - false - 0.0 - 7.0 - 7.0 - false - false - true - + EcalClustersCorr KalmanFullTracks @@ -204,7 +112,7 @@ 0.05 0 0.02 - -7.5 + 0.0 28 1000 false @@ -214,7 +122,7 @@ false true true - true + false @@ -226,11 +134,7 @@ true false - - GBLTracks - false - false - + ${outputFile}.slcio @@ 
-238,7 +142,7 @@ ${outputFile}.root - true + false 0.0 KalmanFullTracks From 12251bd994c664630ceac4e04bc4c828780f17d0 Mon Sep 17 00:00:00 2001 From: Sarah Gaiser Date: Wed, 8 Jan 2025 02:55:21 -0800 Subject: [PATCH 6/8] make PhysicsRun2019MCRecon_KF_NoSpacing.lcsim match PhysicsRun2019MCRecon_KF.lcsim in master --- .../PhysicsRun2019MCRecon_KF_NoSpacing.lcsim | 72 ++++++++++++++++--- 1 file changed, 61 insertions(+), 11 deletions(-) diff --git a/steering-files/src/main/resources/org/hps/steering/recon/PhysicsRun2019MCRecon_KF_NoSpacing.lcsim b/steering-files/src/main/resources/org/hps/steering/recon/PhysicsRun2019MCRecon_KF_NoSpacing.lcsim index 77b001ee0..29e1669bc 100644 --- a/steering-files/src/main/resources/org/hps/steering/recon/PhysicsRun2019MCRecon_KF_NoSpacing.lcsim +++ b/steering-files/src/main/resources/org/hps/steering/recon/PhysicsRun2019MCRecon_KF_NoSpacing.lcsim @@ -28,7 +28,7 @@ - 10 + 1000 @@ -64,7 +64,9 @@ - Pileup + .5 + 1 + Pileup Migrad true @@ -91,7 +93,16 @@ nearest neighbor algorithm. 
--> - 50 + 24.0 + 3.0 + false + 400 + 4.0 + 1.0 + 3.0 + 3.0 + true + true false @@ -109,25 +120,64 @@ FinalStateParticles_KF OtherElectrons_KF 0 - 0.05 + 0.3 0 0.02 0.0 - 28 - 1000 + 25 + 40 false 0.0 - 7.0 + 40.0 + 7.0 7.0 - false - true + false + false + true + true true - false + false + true + UnconstrainedMollerCandidates_KF + UnconstrainedMollerVertices_KF + BeamspotConstrainedMollerCandidates_KF + BeamspotConstrainedMollerVertices_KF + TargetConstrainedMollerCandidates_KF + TargetConstrainedMollerVertices_KF - true + 3 + 1 + 8.757651 + 38.0487 + 3.98915 + 11.777395 + 0 + 3 + 3 + 39.95028 + 8.186345 + 13.71568 + 13.52662 + 7.00678 + 13.967129 + 9.771546584 + 1.7652935 + 5 + false + 466 + .725912 + + 0.0 + 0.02 + 0.0 + 0.05 + 0.0 + 1.0 + 7.204329 + false KalmanFullTracks From 602fd12488e1846ee24938487eab390a8981c3dd Mon Sep 17 00:00:00 2001 From: Sarah Gaiser Date: Thu, 9 Jan 2025 10:10:04 -0800 Subject: [PATCH 7/8] updating KFOutputDriver to be compatible with master --- .../PhysicsRun2019MCRecon_KF_NoSpacing.lcsim | 14 +++---- .../recon/tracking/kalman/KFOutputDriver.java | 38 +++++++++++++++++-- 2 files changed, 42 insertions(+), 10 deletions(-) diff --git a/steering-files/src/main/resources/org/hps/steering/recon/PhysicsRun2019MCRecon_KF_NoSpacing.lcsim b/steering-files/src/main/resources/org/hps/steering/recon/PhysicsRun2019MCRecon_KF_NoSpacing.lcsim index 29e1669bc..a2b37866d 100644 --- a/steering-files/src/main/resources/org/hps/steering/recon/PhysicsRun2019MCRecon_KF_NoSpacing.lcsim +++ b/steering-files/src/main/resources/org/hps/steering/recon/PhysicsRun2019MCRecon_KF_NoSpacing.lcsim @@ -151,20 +151,20 @@ 3 1 8.757651 - 38.0487 - 3.98915 - 11.777395 + 38.0487 + 3.98915 + 11.777395 0 3 3 39.95028 8.186345 13.71568 - 13.52662 - 7.00678 + 13.52662 + 7.00678 13.967129 - 9.771546584 - 1.7652935 + 9.771546584 + 1.7652935 5 false 466 diff --git a/tracking/src/main/java/org/hps/recon/tracking/kalman/KFOutputDriver.java 
b/tracking/src/main/java/org/hps/recon/tracking/kalman/KFOutputDriver.java index c8bc20d8e..fa1fd313e 100644 --- a/tracking/src/main/java/org/hps/recon/tracking/kalman/KFOutputDriver.java +++ b/tracking/src/main/java/org/hps/recon/tracking/kalman/KFOutputDriver.java @@ -272,6 +272,7 @@ public void process(EventHeader event) { TrackClusterPairs.put(track,cluster); } } + //plot all raw hits if(b_doAllRawHitPlots){ for (LCRelation fittedHit : _fittedHits) { @@ -285,6 +286,7 @@ public void process(EventHeader event) { aidaKF.histogram1D(hitFolder+"all_raw_hit_chisq_"+sensor.getName()).fill(chi2Prob); } } + int nTracks=tracks.size(); if(debug) System.out.println(this.getClass()+":: found "+nTracks + " tracks"); @@ -339,6 +341,11 @@ public void process(EventHeader event) { // System.out.println(this.getClass().getName()+":: hit on layer = "+((RawTrackerHit) hit.getRawHits().get(0)).getLayer()+" has time = "+hit.getTime()); int stripLayer = ((HpsSiSensor) ((RawTrackerHit) hit.getRawHits().get(0)).getDetectorElement()).getLayerNumber(); // System.out.println(this.getClass().getName()+":: hit on layer = "+stripLayer+" has time = "+hit.getTime()); + + if(debug){ + System.out.println(this.getClass().getName()+":: inserting hit on sensor = "+sensor.getName()); + } + sensorHits.put(sensor, hit); } @@ -346,7 +353,9 @@ public void process(EventHeader event) { System.out.printf("TrackerHit null sensor %s \n", hit.toString()); } _trkTimeSigma=getTrackTime(sensorHits); + // System.out.println(this.getClass().getName()+":: track time = "+_trkTimeSigma.getFirstElement()); + doBasicKFtrack(trk,sensorHits); if (b_doKFresiduals) doKFresiduals(trk, sensorHits,event); @@ -711,7 +720,8 @@ private void doBasicKFtrack(Track trk, Map sensorHits) } } - if (b_doDetailPlots) { + if (b_doDetailPlots) { + int ibins = 15; double start= -12; double end = -5; @@ -734,7 +744,12 @@ private void doKFresiduals(Track trk, Map sensorHits, E for (HpsSiSensor sensor : sensorHits.keySet()) { //Also fill here 
the sensorMPIDs map - sensorMPIDs.put(sensor.getMillepedeId(),sensor); + + if(debug){ + System.out.println(this.getClass().getName()+":: mapping "+sensor.getMillepedeId()+" to " + sensor.getName()); + } + sensorMPIDs.put(sensor.getMillepedeId(),sensor); + ITransform3D trans = sensor.getGeometry().getGlobalToLocal(); // position of hit (track crossing the sensor before kf extrapolation) @@ -846,15 +861,22 @@ private void doKFresiduals(Track trk, Map sensorHits, E } int nres = (trackRes.getNInt()-1); + //int nres = trk.getTrackerHits().size(); + + if(debug){ + System.out.println(this.getClass().getName()+":: number entries in trackRes = "+nres); + } String vol = "_top"; if (trk.getTrackStates().get(0).getTanLambda() < 0) vol = "_bottom"; // get the unbias for (int i_hit =0; i_hit < nres ; i_hit+=1) { if (trackRes.getIntVal(i_hit)!=-999) { + //Measured hit HpsSiSensor hps_sensor = sensorMPIDs.get(trackRes.getIntVal(i_hit)); + Hep3Vector hitPosG = new BasicHep3Vector(sensorHits.get(hps_sensor).getPosition()); Hep3Vector hitPosSensorG = new BasicHep3Vector(hitPosG.v()); ITransform3D g2l = hps_sensor.getGeometry().getGlobalToLocal(); @@ -898,6 +920,7 @@ private void doKFresiduals(Track trk, Map sensorHits, E aidaKF.histogram2D(resFolder+"uresidual_KF_vs_v_pred_" + sensorName).fill(extrapPosSensor.y(),trackRes.getDoubleVal(i_hit)); aidaKF.histogram1D(epullFolder+"ureserror_KF_" + sensorName).fill(trackRes.getFloatVal(i_hit)); aidaKF.histogram1D(epullFolder+"ures_pull_KF_" + sensorName).fill(trackRes.getDoubleVal(i_hit) / trackRes.getFloatVal(i_hit)); + //Get the hit time double hitTime = sensorHits.get(hps_sensor).getTime(); @@ -1006,7 +1029,9 @@ private void setupEoPPlots() { for (String charge : charges) { //put the trk-cluster time in trkpFolder + aidaKF.histogram1D(trkpFolder+"trk-cluTime"+charge+vol,100,-75,75); + aidaKF.histogram2D(eopFolder+"EoP_vs_trackP"+charge+vol+"_fid",200,0,6,200,0,2); 
aidaKF.histogram2D(eopFolder+"EoP_vs_tanLambda"+charge+vol+"_fid",200,0.01,0.08,200,0,2); @@ -1060,6 +1085,7 @@ private void setupPlots() { int mod_2dplot_bins = sensors.size()+mod*2; for (String vol : volumes) { + aidaKF.histogram1D(resFolder+"bresidual_KF"+vol,nbins, -xmax, xmax); aidaKF.histogram1D(resFolder+"uresidual_KF"+vol,nbins, -xmax, xmax); aidaKF.histogram1D(resFolder+"bresidual_KF"+vol+"_L1L4",nbins,-xmax,xmax); @@ -1079,6 +1105,7 @@ private void setupPlots() { //Hits vs channel + int nch = 400; aidaKF.histogram2D(resFolder+"Axial_vs_Stereo_channel_moduleL1b",nch,0,nch,nch,0,nch); aidaKF.histogram2D(resFolder+"Axial_vs_Stereo_channel_moduleL2b",nch,0,nch,nch,0,nch); @@ -1095,7 +1122,6 @@ private void setupPlots() { aidaKF.histogram2D(resFolder+"Axial_vs_Stereo_channel_moduleL5t",nch,0,nch,nch,0,nch); aidaKF.histogram2D(resFolder+"Axial_vs_Stereo_channel_moduleL6t",nch,0,nch,nch,0,nch); aidaKF.histogram2D(resFolder+"Axial_vs_Stereo_channel_moduleL7t",nch,0,nch,nch,0,nch); - for (SiSensor sensor : sensors) { @@ -1105,7 +1131,9 @@ private void setupPlots() { nbins = 250; int l = (sens.getLayerNumber() + 1) / 2; if (l > 1) xmax = 0.05 + (l - 1) * 0.08; + aidaKF.histogram1D(resFolder+"residual_before_KF_" + sensor.getName(), nbins, -xmax, xmax); + xmax = 0.250; @@ -1123,6 +1151,7 @@ private void setupPlots() { aidaKF.histogram1D(epullFolder+"breserror_KF_" + sensor.getName(), nbins, 0.0, 0.1); aidaKF.histogram1D(epullFolder+"ureserror_KF_" + sensor.getName(), nbins, 0.0, 0.2); aidaKF.histogram1D(epullFolder+"bres_pull_KF_" + sensor.getName(), nbins, -5, 5); + aidaKF.histogram1D(epullFolder+"ures_pull_KF_" + sensor.getName(), nbins, -5, 5); aidaKF.histogram2D(resFolder+"residual_after_KF_vs_u_hit_" + sensor.getName(), 100, -20.0, 20.0, 100, -0.04, 0.04); @@ -1139,6 +1168,7 @@ private void setupPlots() { aidaKF.histogram1D(hitFolder+"all_raw_hit_t0_"+sensor.getName(),200, -100, 100.0); aidaKF.histogram1D(hitFolder+"all_raw_hit_amplitude_"+sensor.getName(),200, 
0.0, 4000.0); aidaKF.histogram1D(hitFolder+"all_raw_hit_chisq_"+sensor.getName(),200, 0.0, 2.0); + xmax = 0.0006; if(l==1){ @@ -1188,6 +1218,7 @@ private void setupPlots() { aidaKF.histogram1D(trkpFolder+"phi"+vol+charge,nbins_t,-0.06,0.06); aidaKF.histogram1D(trkpFolder+"tanLambda"+vol+charge,nbins_t,-0.2,0.2); aidaKF.histogram1D(trkpFolder+"trkTime"+vol+charge,nbins_t,-75,75); + aidaKF.histogram1D(trkpFolder+"trkTimeSD"+vol+charge,nbins_t,0,10); aidaKF.histogram1D(trkpFolder+"p"+vol+charge,nbins_p,0.,pmax); @@ -1314,3 +1345,4 @@ private LCRelation getFittedHit(RawTrackerHit rawHit) { return fittedRawTrackerHitMap.get(rawHit); } } + From 59ef90fccd6a508db7006f58f7ad17006b495cff Mon Sep 17 00:00:00 2001 From: Matt Graham Date: Mon, 27 Jan 2025 13:15:48 -0800 Subject: [PATCH 8/8] add 4ns time-early to hodoscope patterns --- ...n2019TrigSinglesWithPulserDataMerging.lcsim | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/steering-files/src/main/resources/org/hps/steering/readout/PhysicsRun2019TrigSinglesWithPulserDataMerging.lcsim b/steering-files/src/main/resources/org/hps/steering/readout/PhysicsRun2019TrigSinglesWithPulserDataMerging.lcsim index f1afda715..3bab8c694 100644 --- a/steering-files/src/main/resources/org/hps/steering/readout/PhysicsRun2019TrigSinglesWithPulserDataMerging.lcsim +++ b/steering-files/src/main/resources/org/hps/steering/readout/PhysicsRun2019TrigSinglesWithPulserDataMerging.lcsim @@ -180,7 +180,7 @@ 1 true - + false - - HodoscopeCorrectedHits - HodoscopePatterns + + HodoscopeCorrectedHits + HodoscopePatterns true - - false + 4.0 + false