diff --git a/.gitignore b/.gitignore index 318e444d..eb22feb4 100644 --- a/.gitignore +++ b/.gitignore @@ -2,5 +2,6 @@ *.jl.cov *.jl.*.cov *.jl.mem +*.lh5 .ipynb_checkpoints Manifest.toml diff --git a/Project.toml b/Project.toml index fcab729e..ccf1451a 100644 --- a/Project.toml +++ b/Project.toml @@ -31,12 +31,13 @@ Unitful = "1986cc42-f94f-5a68-af5c-568840ba703d" [weakdeps] LegendHDF5IO = "c9265ca6-b027-5446-b1a4-febfa8dd10b0" +LegendDataTypes = "99e09c13-5545-5ee2-bfa2-77f358fb75d8" Plots = "91a5bcdd-55d7-5caf-9e0b-520d859cae80" RecipesBase = "3cdcf5f2-1ef4-517c-9805-6587b60abb01" SolidStateDetectors = "71e43887-2bd9-5f77-aebd-47f656f0a3f0" [extensions] -LegendDataManagementLegendHDF5IOExt = "LegendHDF5IO" +LegendDataManagementLegendHDF5IOExt = ["LegendHDF5IO", "LegendDataTypes"] LegendDataManagementPlotsExt = ["RecipesBase", "Plots"] LegendDataManagementSolidStateDetectorsExt = "SolidStateDetectors" @@ -47,6 +48,7 @@ Format = "1" Glob = "1" IntervalSets = "0.6, 0.7" JSON = "0.21, 1" +LegendDataTypes = "0.1.13" LRUCache = "1.5" LegendHDF5IO = "0.1.14" LinearAlgebra = "1" @@ -70,6 +72,3 @@ TypedTables = "1.4" UUIDs = "1" Unitful = "1" julia = "1.10" - -[extras] -LegendHDF5IO = "c9265ca6-b027-5446-b1a4-febfa8dd10b0" diff --git a/docs/src/extensions.md b/docs/src/extensions.md index 4111ab8f..6aaa24eb 100644 --- a/docs/src/extensions.md +++ b/docs/src/extensions.md @@ -1,8 +1,42 @@ # Extensions +## `LegendHDF5IO` extension + +LegendDataManagement provides an extension for [LegendHDF5IO](https://github.com/legend-exp/LegendHDF5IO.jl). +This makes it possible to directly load LEGEND data from HDF5 files via the `read_ldata` function. The extension is automatically loaded when both packages are loaded. +Example (requires a `$LEGEND_DATA_CONFIG` environment variable pointing to a legend data-config file): + +```julia +using LegendDataManagement, LegendHDF5IO +l200 = LegendData(:l200) +filekeys = search_disk(FileKey, l200.tier[:jldsp, :cal, :p03, :r000]) + +chinfo = channelinfo(l200, (:p03, :r000, :cal); system=:geds, only_processable=true) + +ch = chinfo[1].channel + +dsp = read_ldata(l200, :jldsp, first(filekeys), ch) +dsp = read_ldata(l200, :jldsp, :cal, :p03, :r000, ch) +dsp = read_ldata((:e_cusp, :e_trap, :blmean, :blslope), l200, :jldsp, :cal, :p03, :r000, ch) +``` +`read_ldata` automatically loads LEGEND data for a specific `DataTier` and data selection, e.g. a `FileKey` or a run selection, for a given `ChannelId`. The `search_disk` function allows the user to search for available `DataTier` and `FileKey` on disk. The first argument can be either a selection of keys in the form of an `NTuple` of `Symbol`s or a [PropertyFunction](https://github.com/oschulz/PropertyFunctions.jl/tree/main) which will be applied during loading.
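+A hypothetical sketch of the `PropertyFunction` variant (re-using `l200` and `ch` from the example above and the property names from the `NTuple` example; `@pf` is the macro from PropertyFunctions.jl):
+```julia
+using PropertyFunctions
+# a per-event property function is applied while the data is being loaded
+e_ratio = read_ldata(@pf($e_cusp / $e_trap), l200, :jldsp, :cal, :p03, :r000, ch)
+```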
+It is also possible to load a whole `DataPartition` or `DataPeriod` for a given `ChannelId` `ch`: +```julia +dsp = read_ldata(l200, :jldsp, :cal, DataPartition(1), ch) +dsp = read_ldata(l200, :jldsp, :cal, DataPeriod(3), ch) +``` +In addition, it is possible to load a random selection of `n_evts` events from each loaded file: +```julia +dsp = read_ldata(l200, :jldsp, :cal, :p03, :r000, ch; n_evts=1000) +``` +For simplicity, `ch` can also be given as a `DetectorId`, which will be converted internally to a `ChannelId`: +```julia +det = chinfo[1].detector +dsp = read_ldata(l200, :jldsp, :cal, :p03, :r000, det) +``` ## `SolidStateDetectors` extension -LegendDataManagment provides an extension for [SolidStateDetectors](https://github.com/JuliaPhysics/SolidStateDetectors.jl). This makes it possible to create `SolidStateDetector` instances from LEGEND metadata. +LegendDataManagement provides an extension for [SolidStateDetectors](https://github.com/JuliaPhysics/SolidStateDetectors.jl). This makes it possible to create `SolidStateDetector` and `Simulation` instances from LEGEND metadata. Example (requires a `$LEGEND_DATA_CONFIG` environment variable pointing to a legend data-config file): @@ -18,6 +52,21 @@ A detector can also be constructed using the filename of the LEGEND metadata det det = SolidStateDetector(LegendData, "V99000A.json") ``` +In addition, when creating a `Simulation`, all simulation functions in SolidStateDetectors.jl can be applied. As usual, all fields stored in the `Simulation` can be written and read using `LegendHDF5IO`: + +```julia +using LegendDataManagement +using SolidStateDetectors + +sim = Simulation(LegendData, "V99000A.json") +simulate!(sim) # calculate electric field and weighting potentials + +using LegendHDF5IO +ssd_write("V99000A.lh5", sim) +sim_in = ssd_read("V99000A.lh5", Simulation) +``` + + The following code will generate an overview plot of every 5th LEGEND detector (requires the actual LEGEND metadata instead of the metadata in legend-testdata): ```julia diff --git a/ext/LegendDataManagementLegendHDF5IOExt.jl b/ext/LegendDataManagementLegendHDF5IOExt.jl index f4b44b92..3282478d 100644 --- a/ext/LegendDataManagementLegendHDF5IOExt.jl +++ b/ext/LegendDataManagementLegendHDF5IOExt.jl @@ -1,9 +1,26 @@ module LegendDataManagementLegendHDF5IOExt using LegendDataManagement +LegendDataManagement._lh5_ext_loaded(::Val{true}) = true +using LegendDataManagement.LDMUtils: detector2channel +using LegendDataManagement: RunCategorySelLike using LegendHDF5IO +using LegendDataTypes: fast_flatten, flatten_by_key +using TypedTables, PropertyFunctions -# using LegendDataManagement: LegendDataManagement.DataSelector +const ChannelOrDetectorIdLike = Union{ChannelIdLike, DetectorIdLike} +const AbstractDataSelectorLike = Union{AbstractString, Symbol, DataTierLike, DataCategoryLike, DataPeriodLike, DataRunLike, DataPartitionLike, ChannelOrDetectorIdLike} +const PossibleDataSelectors = [DataTier, DataCategory, DataPeriod, DataRun, DataPartition, ChannelId, DetectorId] + +function _get_channelid(data::LegendData, rsel::Union{AnyValiditySelection, RunCategorySelLike}, det::ChannelOrDetectorIdLike) + if LegendDataManagement._can_convert_to(ChannelId, det) + ChannelId(det) + elseif LegendDataManagement._can_convert_to(DetectorId, det) + detector2channel(data, rsel, det) + else + throw(ArgumentError("$det is neither a ChannelId nor a DetectorId")) + end +end const dataselector_bytypes = Dict{Type, String}() @@ -43,7 +60,7 @@ end function
LegendHDF5IO.LH5Array(ds::LegendHDF5IO.HDF5.Dataset, ::Type{<:AbstractArray{<:T, N}}) where {T <: LegendDataManagement.DataSelector, N} - + s = read(ds) T.(s) end @@ -68,4 +85,166 @@ function __init__() (@isdefined DataPartition) && extend_datatype_dict(DataPartition, "datapartition") end -end \ No newline at end of file +function _lh5_data_open(f::Function, data::LegendData, tier::DataTierLike, filekey::FileKey, ch::ChannelIdLike, mode::AbstractString="r") + ch_filename = data.tier[DataTier(tier), filekey, ch] + filename = data.tier[DataTier(tier), filekey] + if isfile(ch_filename) + @debug "Read from $(basename(ch_filename))" + LegendHDF5IO.lh5open(f, ch_filename, mode) + elseif isfile(filename) + @debug "Read from $(basename(filename))" + LegendHDF5IO.lh5open(f, filename, mode) + else + throw(ArgumentError("Neither $(basename(filename)) nor $(basename(ch_filename)) found")) + end +end + +_propfunc_columnnames(f::PropSelFunction{cols}) where cols = cols + +_load_all_keys(nt::NamedTuple, n_evts::Int=-1) = if length(nt) == 1 _load_all_keys(nt[first(keys(nt))], n_evts) else NamedTuple{keys(nt)}(map(x -> _load_all_keys(nt[x], n_evts), keys(nt))) end +_load_all_keys(arr::AbstractArray, n_evts::Int=-1) = arr[:][if (n_evts < 1 || n_evts > length(arr)) 1:length(arr) else rand(1:length(arr), n_evts) end] +_load_all_keys(t::Table, n_evts::Int=-1) = t[:][if (n_evts < 1 || n_evts > length(t)) 1:length(t) else rand(1:length(t), n_evts) end] +_load_all_keys(x, n_evts::Int=-1) = x + +function LegendDataManagement.read_ldata(f::Base.Callable, data::LegendData, rsel::Tuple{DataTierLike, FileKey, ChannelOrDetectorIdLike}; n_evts::Int=-1) + tier, filekey, ch = DataTier(rsel[1]), rsel[2], if !isempty(string((rsel[3]))) _get_channelid(data, rsel[2], rsel[3]) else rsel[3] end + _lh5_data_open(data, tier, filekey, ch) do h + if !isempty(string((ch))) && !haskey(h, "$ch") + throw(ArgumentError("Channel $ch not found in $(basename(string(h.data_store)))")) + end + if f == identity + if !isempty(string((ch))) + _load_all_keys(h[ch, tier], n_evts) + else + _load_all_keys(h[tier], n_evts) + end + elseif f isa PropSelFunction + if !isempty(string((ch))) + _load_all_keys(getproperties(_propfunc_columnnames(f)...)(h[ch, tier]), n_evts) + else + _load_all_keys(getproperties(_propfunc_columnnames(f)...)(h[tier]), n_evts) + end + else + result = if !isempty(string((ch))) + f.(_load_all_keys(h[ch, tier], n_evts)) + else + f.(_load_all_keys(h[tier], n_evts)) + end + if result isa AbstractVector{<:NamedTuple} + Table(result) + else + result + end + end + end +end + +function LegendDataManagement.read_ldata(f::Base.Callable, data::LegendData, rsel::Tuple{DataTierLike, FileKey}; kwargs...) + ch_keys = _lh5_data_open(data, rsel[1], rsel[2], "") do h + keys(h) + end + @debug "Found keys: $ch_keys" + if length(ch_keys) == 1 + if string(only(ch_keys)) == string(rsel[1]) + LegendDataManagement.read_ldata(f, data, (rsel[1], rsel[2], ""); kwargs...) + elseif LegendDataManagement._can_convert_to(ChannelId, only(ch_keys)) || LegendDataManagement._can_convert_to(DetectorId, only(ch_keys)) + LegendDataManagement.read_ldata(f, data, (rsel[1], rsel[2], string(only(ch_keys))); kwargs...) + else + throw(ArgumentError("No tierm channel or detector found in $(basename(string(h.data_store)))")) + end + else + NamedTuple{Tuple(Symbol.(ch_keys))}([LegendDataManagement.read_ldata(f, data, (rsel[1], rsel[2], ch); kwargs...) for ch in ch_keys]...) + end +end + +lflatten(x) = fast_flatten(x) +# lflatten(t::AbstractVector{<:Table}) = append!(t...) 
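+# lflatten merges the per-file results returned by read_ldata: plain arrays are concatenated
+# via LegendDataTypes.fast_flatten, and vectors of NamedTuples (one entry per loaded file)
+# are merged key-by-key via flatten_by_key (see the method below).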
+lflatten(nt::AbstractVector{<:NamedTuple}) = flatten_by_key(nt) + +function LegendDataManagement.read_ldata(f::Base.Callable, data::LegendData, rsel::Tuple{DataTierLike, AbstractVector{FileKey}, ChannelOrDetectorIdLike}; kwargs...) + if !isempty(string(rsel[3])) + lflatten([LegendDataManagement.read_ldata(f, data, (rsel[1], fk, rsel[3]); kwargs...) for fk in rsel[2]]) + else + lflatten([LegendDataManagement.read_ldata(f, data, (rsel[1], fk); kwargs...) for fk in rsel[2]]) + end +end +LegendDataManagement.read_ldata(f::Base.Callable, data::LegendData, rsel::Tuple{DataTierLike, AbstractVector{FileKey}}; kwargs...) = + LegendDataManagement.read_ldata(f, data, (rsel[1], rsel[2], ""); kwargs...) + +### Argument distinction for different DataSelector Types +function _convert_rsel2dsel(rsel::NTuple{<:Any, AbstractDataSelectorLike}) + selector_types = [PossibleDataSelectors[LegendDataManagement._can_convert_to.(PossibleDataSelectors, Ref(s))] for s in rsel] + if length(selector_types[2]) > 1 && DataCategory in selector_types[2] + selector_types[2] = [DataCategory] + end + if isempty(last(selector_types)) + selector_types[end] = [String] + end + if !all(length.(selector_types) .<= 1) + throw(ArgumentError("Ambiguous selector types: $selector_types for $rsel")) + end + Tuple([only(st)(r) for (r, st) in zip(rsel, selector_types)]) +end + +function LegendDataManagement.read_ldata(f::Base.Callable, data::LegendData, rsel::NTuple{<:Any, AbstractDataSelectorLike}; kwargs...) + LegendDataManagement.read_ldata(f, data, _convert_rsel2dsel(rsel); kwargs...) +end + +LegendDataManagement.read_ldata(f::Base.Callable, data::LegendData, rsel::Tuple{DataTier, DataCategory, DataPeriod}; kwargs...) = + LegendDataManagement.read_ldata(f, data, (DataTier(rsel[1]), DataCategory(rsel[2]), DataPeriod(rsel[3]), "")) + +LegendDataManagement.read_ldata(f::Base.Callable, data::LegendData, rsel::Tuple{DataTier, DataCategory, DataPeriod, DataRun}; kwargs...) = + LegendDataManagement.read_ldata(f, data, (rsel[1], rsel[2], rsel[3], rsel[4], "")) + + +function LegendDataManagement.read_ldata(f::Base.Callable, data::LegendData, rsel::Tuple{DataTier, DataCategory, DataPartition, ChannelOrDetectorIdLike}; kwargs...) + first_run = first(LegendDataManagement._get_partitions(data, :default)[rsel[3]]) + ch = _get_channelid(data, (first_run.period, first_run.run, rsel[2]), rsel[4]) + pinfo = partitioninfo(data, ch, rsel[3]) + @assert ch == _get_channelid(data, (first(pinfo).period, first(pinfo).run, rsel[2]), rsel[4]) "Channel mismatch in partitioninfo" + LegendDataManagement.read_ldata(f, data, (rsel[1], rsel[2], pinfo, ch); kwargs...) +end + +function LegendDataManagement.read_ldata(f::Base.Callable, data::LegendData, rsel::Tuple{DataTier, DataCategory, DataPeriod, ChannelOrDetectorIdLike}; kwargs...) + rinfo = runinfo(data, rsel[3]) + first_run = first(rinfo) + ch = if !isempty(string(rsel[4])) + _get_channelid(data, (first_run.period, first_run.run, rsel[2]), rsel[4]) + else + string(rsel[4]) + end + LegendDataManagement.read_ldata(f, data, (rsel[1], rsel[2], rinfo, ch); kwargs...) +end + +function LegendDataManagement.read_ldata(f::Base.Callable, data::LegendData, rsel::Tuple{DataTier, DataCategory, DataPeriod, DataRun, ChannelOrDetectorIdLike}; kwargs...) + fks = search_disk(FileKey, data.tier[rsel[1], rsel[2], rsel[3], rsel[4]]) + ch = rsel[5] + if isempty(fks) && isfile(data.tier[rsel[1:4]..., ch]) + LegendDataManagement.read_ldata(f, data, (rsel[1], start_filekey(data, (rsel[3], rsel[4], rsel[2])), ch); kwargs...) 
+ elseif !isempty(fks) + LegendDataManagement.read_ldata(f, data, (rsel[1], fks, ch); kwargs...) + else + throw(ArgumentError("No filekeys found for $(rsel[2]) $(rsel[3]) $(rsel[4])")) + end +end + + +### DataPartition +const _partinfo_required_cols = NamedTuple{(:period, :run), Tuple{DataPeriod, DataRun}} + +LegendDataManagement.read_ldata(f::Base.Callable, data::LegendData, rsel::Tuple{DataTierLike, DataCategoryLike, Table{_partinfo_required_cols}, ChannelOrDetectorIdLike}; kwargs...) = + lflatten([LegendDataManagement.read_ldata(f, data, (rsel[1], rsel[2], r.period, r.run, rsel[4]); kwargs...) for r in rsel[3]]) + +LegendDataManagement.read_ldata(f::Base.Callable, data::LegendData, rsel::Tuple{DataTierLike, DataCategoryLike, Table{_partinfo_required_cols}}; kwargs...) = + LegendDataManagement.read_ldata(f, data, (rsel[1], rsel[2], rsel[3], ""); kwargs...) + +function LegendDataManagement.read_ldata(f::Base.Callable, data::LegendData, rsel::Tuple{DataTierLike, DataCategoryLike, Table, ChannelOrDetectorIdLike}; kwargs...) + @assert (hasproperty(rsel[3], :period) && hasproperty(rsel[3], :run)) "Runtable doesn't provide periods and runs" + LegendDataManagement.read_ldata(f, data, (rsel[1], rsel[2], Table(period = rsel[3].period, run = rsel[3].run), rsel[4]); kwargs...) +end + +LegendDataManagement.read_ldata(f::Base.Callable, data::LegendData, rsel::Tuple{DataTierLike, DataCategoryLike, Table}; kwargs...) = + LegendDataManagement.read_ldata(f, data, (rsel[1], rsel[2], rsel[3], ""); kwargs...) + + +end # module \ No newline at end of file diff --git a/ext/LegendDataManagementSolidStateDetectorsExt.jl b/ext/LegendDataManagementSolidStateDetectorsExt.jl index dd1b5306..93ea962f 100644 --- a/ext/LegendDataManagementSolidStateDetectorsExt.jl +++ b/ext/LegendDataManagementSolidStateDetectorsExt.jl @@ -3,38 +3,33 @@ module LegendDataManagementSolidStateDetectorsExt using SolidStateDetectors -import SolidStateDetectors.ConstructiveSolidGeometry as CSG - using LegendDataManagement using Unitful using PropDicts - const _SSDDefaultNumtype = Float32 + """ - SolidStateDetector[{T<:Real}](data::LegendData, detector::DetectorIdLike - SolidStateDetector[{T<:Real}(::Type{LegendData}, detector_props::AbstractDict) - SolidStateDetector[{T<:Real}(::Type{LegendData}, json_filename::AbstractString) + SolidStateDetector[{T<:AbstractFloat}](data::LegendData, detector::DetectorIdLike) + SolidStateDetector[{T<:AbstractFloat}](::Type{LegendData}, detector_props::AbstractDict) + SolidStateDetector[{T<:AbstractFloat}](::Type{LegendData}, json_filename::AbstractString) LegendDataManagement provides an extension for SolidStateDetectors, a -`SolidStateDetector` can be constructed from LEGEND metadata using the +`SolidStateDetector` can be constructed from LEGEND metadata using the methods above. 
""" function SolidStateDetectors.SolidStateDetector(data::LegendData, detector::DetectorIdLike) SolidStateDetectors.SolidStateDetector{_SSDDefaultNumtype}(data, detector) end -function SolidStateDetectors.SolidStateDetector{T}(data::LegendData, detector::DetectorIdLike) where {T<:Real} +function SolidStateDetectors.SolidStateDetector{T}(data::LegendData, detector::DetectorIdLike) where {T<:AbstractFloat} detector_props = getproperty(data.metadata.hardware.detectors.germanium.diodes, Symbol(detector)) - SolidStateDetector{T}(LegendData, detector_props) + xtal_props = getproperty(data.metadata.hardware.detectors.germanium.crystals, Symbol(string(detector)[1:end-1])) + SolidStateDetector{T}(LegendData, detector_props, xtal_props) end - -to_SSD_units(::Type{T}, x, unit) where {T} = T(SolidStateDetectors.to_internal_units(x*unit)) - - -function SolidStateDetectors.SolidStateDetector{T}(::Type{LegendData}, filename::String) where {T<:Real} +function SolidStateDetectors.SolidStateDetector{T}(::Type{LegendData}, filename::String) where {T<:AbstractFloat} SolidStateDetector{T}(LegendData, readprops(filename, subst_pathvar = false, subst_env = false, trim_null = false)) end @@ -46,11 +41,58 @@ function SolidStateDetectors.SolidStateDetector(::Type{LegendData}, meta::Abstra SolidStateDetectors.SolidStateDetector{_SSDDefaultNumtype}(LegendData, meta) end -function SolidStateDetectors.SolidStateDetector{T}(::Type{LegendData}, meta::AbstractDict) where {T<:Real} - SolidStateDetectors.SolidStateDetector{T}(LegendData, convert(PropDict, meta)) +function SolidStateDetectors.SolidStateDetector{T}(::Type{LegendData}, meta::AbstractDict) where {T<:AbstractFloat} + SolidStateDetectors.SolidStateDetector{T}(LegendData, convert(PropDict, meta), LegendDataManagement.NoSuchPropsDBEntry("",[])) +end + +function SolidStateDetectors.SolidStateDetector{T}(::Type{LegendData}, meta::PropDict, xtal_meta::Union{PropDict, LegendDataManagement.NoSuchPropsDBEntry}) where {T<:AbstractFloat} + config_dict = create_SSD_config_dict_from_LEGEND_metadata(meta, xtal_meta) + return SolidStateDetector{T}(config_dict, SolidStateDetectors.construct_units(config_dict)) +end + +""" + Simulation[{T<:AbstractFloat}](data::LegendData, detector::DetectorIdLike) + Simulation[{T<:AbstractFloat}](::Type{LegendData}, detector_props::AbstractDict) + Simulation[{T<:AbstractFloat}](::Type{LegendData}, json_filename::AbstractString) + +LegendDataManagement provides an extension for SolidStateDetectors; a +`Simulation` can be constructed from LEGEND metadata using the +methods above. 
+""" +function SolidStateDetectors.Simulation(data::LegendData, detector::DetectorIdLike) + SolidStateDetectors.Simulation{_SSDDefaultNumtype}(data, detector) +end + +function SolidStateDetectors.Simulation{T}(data::LegendData, detector::DetectorIdLike) where {T<:AbstractFloat} + detector_props = getproperty(data.metadata.hardware.detectors.germanium.diodes, Symbol(detector)) + xtal_props = getproperty(data.metadata.hardware.detectors.germanium.crystals, Symbol(string(detector)[1:end-1])) + Simulation{T}(LegendData, detector_props, xtal_props) +end + +function SolidStateDetectors.Simulation{T}(::Type{LegendData}, filename::String) where {T<:AbstractFloat} + Simulation{T}(LegendData, readprops(filename, subst_pathvar = false, subst_env = false, trim_null = false)) +end + +function SolidStateDetectors.Simulation(::Type{LegendData}, filename::String) + Simulation{_SSDDefaultNumtype}(LegendData, filename) +end + +function SolidStateDetectors.Simulation(::Type{LegendData}, meta::AbstractDict) + SolidStateDetectors.Simulation{_SSDDefaultNumtype}(LegendData, meta) end -function SolidStateDetectors.SolidStateDetector{T}(::Type{LegendData}, meta::PropDict) where {T<:Real} +function SolidStateDetectors.Simulation{T}(::Type{LegendData}, meta::AbstractDict) where {T<:AbstractFloat} + SolidStateDetectors.Simulation{T}(LegendData, convert(PropDict, meta), LegendDataManagement.NoSuchPropsDBEntry("", [])) +end + +function SolidStateDetectors.Simulation{T}(::Type{LegendData}, meta::PropDict, xtal_meta::Union{PropDict, LegendDataManagement.NoSuchPropsDBEntry}) where {T<:AbstractFloat} + config_dict = create_SSD_config_dict_from_LEGEND_metadata(meta, xtal_meta) + return Simulation{T}(config_dict) +end + + +function create_SSD_config_dict_from_LEGEND_metadata(meta::PropDict, xtal_meta::X; dicttype = Dict{String,Any}) where {X <: Union{PropDict, LegendDataManagement.NoSuchPropsDBEntry}} + # Not all possible configurations are yet implemented! # https://github.com/legend-exp/legend-metadata/blob/main/hardware/detectors/detector-metadata_1.pdf # https://github.com/legend-exp/legend-metadata/blob/main/hardware/detectors/detector-metadata_2.pdf @@ -60,148 +102,224 @@ function SolidStateDetectors.SolidStateDetector{T}(::Type{LegendData}, meta::Pro # https://github.com/legend-exp/legend-metadata/blob/main/hardware/detectors/detector-metadata_6.pdf # https://github.com/legend-exp/legend-metadata/blob/main/hardware/detectors/detector-metadata_7.pdf - gap = to_SSD_units(T, 1, u"mm") + gap = 1.0 dl_thickness_in_mm = :dl_thickness_in_mm in keys(meta.geometry) ? 
meta.geometry.dl_thickness_in_mm : 0 - li_thickness = to_SSD_units(T, dl_thickness_in_mm, u"mm") + li_thickness = dl_thickness_in_mm - crystal_radius = to_SSD_units(T, meta.geometry.radius_in_mm, u"mm") - crystal_height = to_SSD_units(T, meta.geometry.height_in_mm, u"mm") + crystal_radius = meta.geometry.radius_in_mm + crystal_height = meta.geometry.height_in_mm is_coax = meta.type == "coax" - # main crystal - semiconductor_geometry = CSG.Cone{T}(CSG.ClosedPrimitive; - r = crystal_radius, - hZ = crystal_height / 2, - origin = CartesianPoint{T}(0, 0, crystal_height / 2) + config_dict = dicttype( + "name" => meta.name, + "units" => dicttype( + "length" => "mm", + "potential" => "V", + "angle" => "deg", + "temperature" => "K" + ), + "grid" => dicttype( + "coordinates" => "cylindrical", + "axes" => dicttype( + "r" => dicttype( + "to" => crystal_radius * 1.2, + "boundaries" => "inf" + ), + "phi" => dicttype( + "from" => 0, + "to" => 0, + "boundaries" => "reflecting" + ), + "z" => dicttype( + "from" => -0.2 * crystal_height, + "to" => 1.2 * crystal_height, + "boundaries" => "inf" + ) + ) + ), + "medium" => "vacuum", + "detectors" => [] ) - # borehole - has_borehole = haskey(meta.geometry, :borehole) - if is_coax && !has_borehole - error("Coax detectors should have boreholes") - end - if has_borehole - borehole_depth = to_SSD_units(T, meta.geometry.borehole.depth_in_mm, u"mm") - borehole_radius = to_SSD_units(T, meta.geometry.borehole.radius_in_mm, u"mm") - semiconductor_geometry -= CSG.Cone{T}(CSG.ClosedPrimitive; - r = borehole_radius, - hZ = borehole_depth / 2 + gap, - origin = CartesianPoint{T}(0, 0, is_coax ? borehole_depth/2 - gap : crystal_height - borehole_depth/2 + gap) - ) - end + push!(config_dict["detectors"], dicttype( + "semiconductor" => dicttype( + "material" => "HPGe", + "charge_drift_model" => dicttype( + "include" => joinpath(SolidStateDetectors.get_path_to_example_config_files(), "ADLChargeDriftModel", "drift_velocity_config.yaml"), + ), + # "impurity_density" => dicttype("parameters" => Vector()), + "geometry" => dicttype(), + "temperature" => 78 + ), + "contacts" => [] + )) - # borehole taper - has_borehole_taper = haskey(meta.geometry.taper, :borehole) - if has_borehole_taper - borehole_taper_height = to_SSD_units(T, meta.geometry.taper.borehole.height_in_mm, u"mm") - if haskey(meta.geometry.taper.borehole, :radius_in_mm) - borehole_taper_radius = to_SSD_units(T, meta.geometry.taper.borehole.radius_in_mm, u"mm") - borehole_taper_angle = atan(borehole_taper_radius, borehole_taper_height) - elseif haskey(meta.geometry.taper.borehole, :angle_in_deg) - borehole_taper_angle = to_SSD_units(T, meta.geometry.taper.borehole.angle_in_deg, u"°") - borehole_taper_radius = borehole_taper_height * tan(borehole_taper_angle) - else - error("The borehole taper needs either radius_in_mm or angle_in_deg") - end - has_borehole_taper = borehole_taper_height > 0 && borehole_taper_angle > 0 - if has_borehole_taper && !has_borehole - error("A detector without a borehole cannot have a borehole taper.") + # main crystal + semiconductor_geometry_basis = dicttype("cone" => dicttype( + "r" => crystal_radius, + "h" => crystal_height, + "origin" => [0, 0, crystal_height / 2] + )) + semiconductor_geometry_subtractions = [] + begin + # borehole + has_borehole = haskey(meta.geometry, :borehole) + if is_coax && !has_borehole + error("Coax detectors should have boreholes") end - if has_borehole_taper && is_coax - error("Coax detectors should not have borehole tapers") + if has_borehole + borehole_depth = 
meta.geometry.borehole.depth_in_mm + borehole_radius = meta.geometry.borehole.radius_in_mm + push!(semiconductor_geometry_subtractions, dicttype("cone" => dicttype( + "r" => borehole_radius, + "h" => borehole_depth + 2*gap, + "origin" => [0, 0, is_coax ? borehole_depth/2 - gap : crystal_height - borehole_depth/2 + gap] + ))) end + + # borehole taper + has_borehole_taper = haskey(meta.geometry.taper, :borehole) if has_borehole_taper - r_center = borehole_radius + borehole_taper_height * tan(borehole_taper_angle) / 2 - hZ = borehole_taper_height/2 - Δr = hZ * tan(borehole_taper_angle) - r_out_bot = r_center - Δr - r_out_top = r_center + Δr * (1 + 2*gap/hZ) - r = ((r_out_bot,), (r_out_top,)) - semiconductor_geometry -= CSG.Cone{T}(CSG.ClosedPrimitive; - r = r, - hZ = hZ + gap, - origin = CartesianPoint{T}(0, 0, crystal_height - borehole_taper_height/2 + gap) - ) + borehole_taper_height = meta.geometry.taper.borehole.height_in_mm + if haskey(meta.geometry.taper.borehole, :radius_in_mm) + borehole_taper_radius = meta.geometry.taper.borehole.radius_in_mm + borehole_taper_angle = atand(borehole_taper_radius, borehole_taper_height) + elseif haskey(meta.geometry.taper.borehole, :angle_in_deg) + borehole_taper_angle = meta.geometry.taper.borehole.angle_in_deg + borehole_taper_radius = borehole_taper_height * tand(borehole_taper_angle) + else + error("The borehole taper needs either radius_in_mm or angle_in_deg") + end + has_borehole_taper = borehole_taper_height > 0 && borehole_taper_angle > 0 + if has_borehole_taper && !has_borehole + error("A detector without a borehole cannot have a borehole taper.") + end + if has_borehole_taper && is_coax + error("Coax detectors should not have borehole tapers") + end + if has_borehole_taper + r_center = borehole_radius + borehole_taper_radius / 2 + hZ = borehole_taper_height/2 + Δr = hZ * tand(borehole_taper_angle) + r_out_bot = r_center - Δr + r_out_top = r_center + Δr * (1 + 2*gap/hZ) + push!(semiconductor_geometry_subtractions, dicttype("cone" => dicttype( + "r" => dicttype( + "bottom" => r_out_bot, + "top" => r_out_top + ), + "h" => 2 * hZ, + "origin" => [0, 0, crystal_height - borehole_taper_height / 2 + gap] + ))) + end + end + + # top taper + if haskey(meta.geometry.taper, :top) + top_taper_height = meta.geometry.taper.top.height_in_mm + if haskey(meta.geometry.taper.top, :radius_in_mm) + top_taper_radius = meta.geometry.taper.top.radius_in_mm + top_taper_angle = atand(top_taper_radius, top_taper_height) + elseif haskey(meta.geometry.taper.top, :angle_in_deg) + top_taper_angle = meta.geometry.taper.top.angle_in_deg + top_taper_radius = top_taper_height * tand(top_taper_angle) + else + error("The top taper needs either radius_in_mm or angle_in_deg") + end + has_top_taper = top_taper_height > 0 && top_taper_angle > 0 + if has_top_taper + r_center = crystal_radius - top_taper_radius / 2 + hZ = top_taper_height/2 + 1gap + h = 2 * hZ + Δr = hZ * tand(top_taper_angle) + r_in_bot = r_center + Δr + r_in_top = r_center - Δr + r_out = max(r_in_top, r_in_bot) + gap # ensure that r_out is always bigger as r_in + push!(semiconductor_geometry_subtractions, dicttype("cone" => dicttype( + "r" => dicttype( + "bottom" => dicttype( + "from" => r_in_bot, + "to" => r_out + ), + "top" => dicttype( + "from" => r_in_top, + "to" => r_out + ) + ), + "h" => h, + "origin" => [0, 0, crystal_height - top_taper_height / 2] + ))) + end end - end - # top taper - if haskey(meta.geometry.taper, :top) - top_taper_height = to_SSD_units(T, meta.geometry.taper.top.height_in_mm, u"mm") - if 
haskey(meta.geometry.taper.top, :radius_in_mm) - top_taper_radius = to_SSD_units(T, meta.geometry.taper.top.radius_in_mm, u"mm") - top_taper_angle = atan(top_taper_radius, top_taper_height) - elseif haskey(meta.geometry.taper.top, :angle_in_deg) - top_taper_angle = to_SSD_units(T, meta.geometry.taper.top.angle_in_deg, u"°") - top_taper_radius = top_taper_height * tan(top_taper_angle) + # bot outer taper + bot_taper_height = meta.geometry.taper.bottom.height_in_mm + if :radius_in_mm in keys(meta.geometry.taper.bottom) + bot_taper_radius = meta.geometry.taper.bottom.radius_in_mm + bot_taper_angle = atand(bot_taper_radius, bot_taper_height) + elseif :angle_in_deg in keys(meta.geometry.taper.bottom) + bot_taper_angle = meta.geometry.taper.bottom.angle_in_deg + bot_taper_radius = bot_taper_height * tand(bot_taper_angle) else - error("The top taper needs either radius_in_mm or angle_in_deg") + error("The bottom outer tape needs either radius_in_mm or angle_in_deg") end - has_top_taper = top_taper_height > 0 && top_taper_angle > 0 - if has_top_taper - r_center = crystal_radius - top_taper_height * tan(top_taper_angle) / 2 - hZ = top_taper_height/2 + 1gap - Δr = hZ * tan(top_taper_angle) - r_in_bot = r_center + Δr - r_in_top = r_center - Δr + has_bot_taper = bot_taper_height > 0 && bot_taper_angle > 0 + if has_bot_taper + r_center = crystal_radius - bot_taper_radius / 2 + hZ = bot_taper_height/2 + 1gap + Δr = hZ * tand(bot_taper_angle) + r_in_bot = r_center - Δr + r_in_top = r_center + Δr r_out = max(r_in_top, r_in_bot) + gap # ensure that r_out is always bigger as r_in - r = ((r_in_bot, r_out),(r_in_top, r_out)) - semiconductor_geometry -= CSG.Cone{T}(CSG.ClosedPrimitive; - r = r, - hZ = hZ, - origin = CartesianPoint{T}(0, 0, crystal_height - top_taper_height/2) - ) + push!(semiconductor_geometry_subtractions, dicttype("cone" => dicttype( + "r" => dicttype( + "bottom" => dicttype( + "from" => r_in_bot, + "to" => r_out + ), + "top" => dicttype( + "from" => r_in_top, + "to" => r_out + ) + ), + "h" => 2 * hZ, + "origin" => [0, 0, bot_taper_height / 2] + ))) + end + + # groove + has_groove = haskey(meta.geometry, :groove) + if has_groove + groove_inner_radius = meta.geometry.groove.radius_in_mm.inner + groove_outer_radius = meta.geometry.groove.radius_in_mm.outer + groove_depth = meta.geometry.groove.depth_in_mm + has_groove = groove_outer_radius > 0 && groove_depth > 0 && groove_inner_radius > 0 + if has_groove + hZ = groove_depth / 2 + gap + r_in = groove_inner_radius + r_out = groove_outer_radius + push!(semiconductor_geometry_subtractions, dicttype("cone" => dicttype( + "r" => dicttype( + "from" => r_in, + "to" => r_out + ), + "h" => 2 * hZ, + "origin" => [0, 0, groove_depth / 2 - gap] + ))) + end end end - # bot outer taper - bot_taper_height = to_SSD_units(T, meta.geometry.taper.bottom.height_in_mm, u"mm") - if :radius_in_mm in keys(meta.geometry.taper.bottom) - bot_taper_radius = to_SSD_units(T, meta.geometry.taper.bottom.radius_in_mm, u"mm") - bot_taper_angle = atan(bot_taper_radius, bot_taper_height) - elseif :angle_in_deg in keys(meta.geometry.taper.bottom) - bot_taper_angle = to_SSD_units(T, meta.geometry.taper.bottom.angle_in_deg, u"°") - bot_taper_radius = bot_taper_height * tan(bot_taper_angle) + if isempty(semiconductor_geometry_subtractions) + config_dict["detectors"][1]["semiconductor"]["geometry"] = semiconductor_geometry_basis else - error("The bottom outer tape needs either radius_in_mm or angle_in_deg") - end - has_bot_taper = bot_taper_height > 0 && bot_taper_angle > 0 - if 
has_bot_taper - r_center = crystal_radius - bot_taper_height * tan(bot_taper_angle) / 2 - hZ = bot_taper_height/2 + 1gap - Δr = hZ * tan(bot_taper_angle) - r_in_bot = r_center - Δr - r_in_top = r_center + Δr - r_out = max(r_in_top, r_in_bot) + gap # ensure that r_out is always bigger as r_in - r = ((r_in_bot, r_out),(r_in_top, r_out)) - semiconductor_geometry -= CSG.Cone{T}(CSG.ClosedPrimitive; - r = r, - hZ = hZ, - origin = CartesianPoint{T}(0, 0, bot_taper_height/2) + config_dict["detectors"][1]["semiconductor"]["geometry"] = dicttype( + "difference" => [semiconductor_geometry_basis, semiconductor_geometry_subtractions...] ) end - # groove - has_groove = haskey(meta.geometry, :groove) - if has_groove - groove_inner_radius = to_SSD_units(T, meta.geometry.groove.radius_in_mm.inner, u"mm") - groove_outer_radius = to_SSD_units(T, meta.geometry.groove.radius_in_mm.outer, u"mm") - groove_depth = to_SSD_units(T, meta.geometry.groove.depth_in_mm, u"mm") - has_groove = groove_outer_radius > 0 && groove_depth > 0 && groove_inner_radius > 0 - if has_groove - hZ = groove_depth / 2 + gap - r_in = groove_inner_radius - r_out = groove_outer_radius - r = ((r_in, r_out), (r_in, r_out)) - semiconductor_geometry -= CSG.Cone{T}(CSG.ClosedPrimitive; - r = r, - hZ = hZ, - origin = CartesianPoint{T}(0, 0, groove_depth / 2 - gap) - ) - end - end # bulletization @@ -214,33 +332,56 @@ function SolidStateDetectors.SolidStateDetector{T}(::Type{LegendData}, meta::Pro ### P+ CONTACT ### - pp_radius = to_SSD_units(T, meta.geometry.pp_contact.radius_in_mm, u"mm") - pp_depth = to_SSD_units(T, meta.geometry.pp_contact.depth_in_mm, u"mm") - pp_contact_geometry = if is_coax - CSG.Cone{T}(CSG.ClosedPrimitive; - r = ((borehole_radius, borehole_radius), (borehole_radius, borehole_radius)), - hZ = borehole_depth/2, - origin = CartesianPoint{T}(0, 0, borehole_depth / 2) - ) + CSG.Cone{T}(CSG.ClosedPrimitive; - r = borehole_radius, - hZ = 0, - origin = CartesianPoint{T}(0, 0, borehole_depth) - ) + CSG.Cone{T}(CSG.ClosedPrimitive; - r = ((borehole_radius, pp_radius), (borehole_radius, pp_radius)), - hZ = 0 - ) + pp_radius = meta.geometry.pp_contact.radius_in_mm + pp_depth = meta.geometry.pp_contact.depth_in_mm + push!(config_dict["detectors"][1]["contacts"], dicttype( + "material" => "HPGe", + "geometry" => dicttype(), + "id" => 1, + "potential" => 0 + )) + config_dict["detectors"][1]["contacts"][1]["geometry"] = if is_coax + dicttype("union" => [ + dicttype("cone" => dicttype( + "r" => dicttype( + "from" => borehole_radius, + "to" => borehole_radius + ), + "h" => borehole_depth, + "origin" => [0, 0, borehole_depth / 2] + )), + dicttype("cone" => dicttype( + "r" => borehole_radius, + "h" => 0, + "origin" => [0, 0, borehole_depth] + )), + dicttype("cone" => dicttype( + "r" => dicttype( + "from" => borehole_radius, + "to" => pp_radius + ), + "h" => 0 + )) + ]) else - CSG.Cone{T}(CSG.ClosedPrimitive; - r = pp_radius, - hZ = pp_depth / 2, - origin = CartesianPoint{T}(0, 0, pp_depth / 2) - ) + dicttype("cone" => dicttype( + "r" => pp_radius, + "h" => pp_depth, + "origin" => [0, 0, pp_depth / 2] + )) end ### MANTLE CONTACT ### - - mantle_contact_geometry = begin # top plate + + push!(config_dict["detectors"][1]["contacts"], dicttype( + "material" => "HPGe", + "geometry" => dicttype("union" => []), + "id" => 2, + "potential" => meta.characterization.manufacturer.recommended_voltage_in_V + )) + config_dict["detectors"][1]["contacts"][2]["geometry"]["union"] = begin + mantle_contact_parts = [] top_plate = begin r = if !has_borehole || 
is_coax !has_top_taper ? crystal_radius : crystal_radius - top_taper_radius @@ -249,149 +390,153 @@ function SolidStateDetectors.SolidStateDetector{T}(::Type{LegendData}, meta::Pro r_out = crystal_radius if has_borehole_taper r_in += borehole_taper_radius end if has_top_taper r_out -= top_taper_radius end - ((r_in, r_out), (r_in, r_out)) + dicttype("from" => r_in, "to" => r_out) end - CSG.Cone{T}(CSG.ClosedPrimitive; - r = r, - hZ = li_thickness / 2, - origin = CartesianPoint{T}(0, 0, crystal_height - li_thickness / 2) - ) + dicttype("cone" => dicttype( + "r" => r, + "h" => li_thickness, + "origin" => [0, 0, crystal_height - li_thickness / 2] + )) end - mc_geometry = top_plate - - # borehole at outer taper + push!(mantle_contact_parts, top_plate) + if has_top_taper - Δr_li_thickness = li_thickness / cos(top_taper_angle) - hZ = top_taper_height/2 + Δr_li_thickness = li_thickness / cosd(top_taper_angle) + h = top_taper_height r_bot = crystal_radius r_top = crystal_radius - top_taper_radius - r = ((r_bot - Δr_li_thickness, r_bot),(r_top - Δr_li_thickness, r_top)) - mc_geometry += CSG.Cone{T}(CSG.ClosedPrimitive; - r = r, - hZ = hZ, - origin = CartesianPoint{T}(0, 0, crystal_height - top_taper_height/2) - ) + push!(mantle_contact_parts, dicttype("cone" => dicttype( + "r" => dicttype( + "bottom" => dicttype( + "from" => r_bot - Δr_li_thickness, + "to" => r_bot + ), + "top" => dicttype( + "from" => r_top - Δr_li_thickness, + "to" => r_top + ) + ), + "h" => h, + "origin" => [0, 0, crystal_height - top_taper_height / 2] + ))) end - # contact in borehole if has_borehole_taper - Δr_li_thickness = li_thickness / cos(borehole_taper_angle) - hZ = borehole_taper_height/2 + Δr_li_thickness = li_thickness / cosd(borehole_taper_angle) + h = borehole_taper_height r_bot = borehole_radius r_top = borehole_radius + borehole_taper_radius - r = ((r_bot, r_bot+Δr_li_thickness),(r_top, r_top+Δr_li_thickness)) - mc_geometry += CSG.Cone{T}(CSG.ClosedPrimitive; - r = r, - hZ = hZ, - origin = CartesianPoint{T}(0, 0, crystal_height - borehole_taper_height/2) - ) - - hZ = (borehole_depth - borehole_taper_height) / 2 - r = ((borehole_radius, borehole_radius+Δr_li_thickness),(borehole_radius, borehole_radius+Δr_li_thickness)) - mc_geometry += CSG.Cone{T}(CSG.ClosedPrimitive; - r = r, - hZ = hZ, - origin = CartesianPoint{T}(0, 0, crystal_height - borehole_taper_height - hZ) - ) + push!(mantle_contact_parts, dicttype("cone" => dicttype( + "r" => dicttype( + "bottom" => dicttype( + "from" => r_bot, + "to" => r_bot + Δr_li_thickness + ), + "top" => dicttype( + "from" => r_top, + "to" => r_top + Δr_li_thickness + ) + ), + "h" => h, + "origin" => [0, 0, crystal_height - borehole_taper_height / 2] + ))) + + h = (borehole_depth - borehole_taper_height) + push!(mantle_contact_parts, dicttype("cone" => dicttype( + "r" => dicttype( + "from" => borehole_radius, + "to" => borehole_radius + Δr_li_thickness + ), + "h" => h, + "origin" => [0, 0, crystal_height - borehole_taper_height - h / 2] + ))) elseif has_borehole && !is_coax # but no borehole taper - hZ = borehole_depth / 2 - r = ((borehole_radius, borehole_radius+li_thickness),(borehole_radius, borehole_radius+li_thickness)) - mc_geometry += CSG.Cone{T}(CSG.ClosedPrimitive; - r = r, - hZ = hZ, - origin = CartesianPoint{T}(0, 0, crystal_height - hZ) - ) + h = borehole_depth + push!(mantle_contact_parts, dicttype("cone" => dicttype( + "r" => dicttype( + "from" => borehole_radius, + "to" => borehole_radius + li_thickness + ), + "h" => h, + "origin" => [0, 0, crystal_height - h / 2] + 
))) end if has_borehole && !is_coax r = borehole_radius + li_thickness - mc_geometry += CSG.Cone{T}(CSG.ClosedPrimitive; - r = r, - hZ = li_thickness / 2, - origin = CartesianPoint{T}(0, 0, crystal_height - borehole_depth - li_thickness / 2) - ) + push!(mantle_contact_parts, dicttype("cone" => dicttype( + "r" => r, + "h" => li_thickness / 2, + "origin" => [0, 0, crystal_height - borehole_depth - li_thickness / 2] + ))) end - # outer surface of mantle contact begin - r = ((crystal_radius-li_thickness, crystal_radius),(crystal_radius-li_thickness, crystal_radius)) - hZ = crystal_height - if has_top_taper hZ -= top_taper_height end - z_origin = hZ/2 + h = crystal_height + if has_top_taper h -= top_taper_height end + z_origin = h/2 if has_bot_taper - hZ -= bot_taper_height + h -= bot_taper_height z_origin += bot_taper_height/2 end - mc_geometry += CSG.Cone{T}(CSG.ClosedPrimitive; - r = r, - hZ = hZ / 2, - origin = CartesianPoint{T}(0, 0, z_origin) - ) + push!(mantle_contact_parts, dicttype("cone" => dicttype( + "r" => dicttype( + "from" => crystal_radius - li_thickness, + "to" => crystal_radius + ), + "h" => h, + "origin" => [0, 0, z_origin] + ))) end - # bottom outer taper contact if has_bot_taper - Δr_li_thickness = li_thickness / cos(bot_taper_angle) - hZ = bot_taper_height/2 + Δr_li_thickness = li_thickness / cosd(bot_taper_angle) + h = bot_taper_height r_bot = crystal_radius - bot_taper_radius r_top = crystal_radius - r = ((r_bot - Δr_li_thickness, r_bot),(r_top - Δr_li_thickness, r_top)) - mc_geometry += CSG.Cone{T}(CSG.ClosedPrimitive; - r = r, - hZ = hZ, - origin = CartesianPoint{T}(0, 0, hZ) - ) - end + push!(mantle_contact_parts, dicttype("cone" => dicttype( + "r" => dicttype( + "bottom" => dicttype( + "from" => r_bot - Δr_li_thickness, + "to" => r_bot + ), + "top" => dicttype( + "from" => r_top - Δr_li_thickness, + "to" => r_top + ) + ), + "h" => h, + "origin" => [0, 0, h / 2] + ))) + end - # bottom surface of mantle contact (only if it has a groove ?) 
if has_groove && groove_outer_radius > 0 r_in = groove_outer_radius r_out = crystal_radius if has_bot_taper r_out -= bot_taper_radius end - r = ((r_in, r_out), (r_in, r_out)) - mc_geometry += CSG.Cone{T}(CSG.ClosedPrimitive; - r = r, - hZ = li_thickness / 2, - origin = CartesianPoint{T}(0, 0, li_thickness / 2) - ) + push!(mantle_contact_parts, dicttype("cone" => dicttype( + "r" => dicttype( + "from" => r_in, + "to" => r_out + ), + "h" => li_thickness, + "origin" => [0, 0, li_thickness / 2] + ))) end - - mc_geometry + + mantle_contact_parts end - - # Hardcoded parameter values: In future, should be defined in config file - temperature = T(78) - material = SolidStateDetectors.material_properties[:HPGe] - - # Impurity Model: Information are stored in `meta.production.impcc` - # For now: Constant impurity density: - # n-type: positive impurity density - # p-type: negative impurity density - # Assume p-type - constant_impurity_density = ustrip(uconvert(u"m^-3", T(-1e9) * u"cm^-3")) - impurity_density_model = SolidStateDetectors.CylindricalImpurityDensity{T}( - (0, 0, constant_impurity_density), # offsets - (0, 0, 0) # linear slopes + + config_dict["detectors"][1]["semiconductor"]["impurity_density"] = dicttype( + "name" => "constant", + "value" => "-1e9cm^-3" ) - # Charge Drift Model: - # Use example ADL charge drift model from SSD (Crystal axis <100> is at φ = 0): - adl_charge_drift_config_file = joinpath(dirname(dirname(pathof(SolidStateDetectors))), - "examples/example_config_files/ADLChargeDriftModel/drift_velocity_config.yaml") - charge_drift_model = SolidStateDetectors.ADLChargeDriftModel{T}(adl_charge_drift_config_file); - - semiconductor = SolidStateDetectors.Semiconductor(temperature, material, impurity_density_model, charge_drift_model, semiconductor_geometry) - - operation_voltage = T(meta.characterization.manufacturer.recommended_voltage_in_V) - pp_contact = SolidStateDetectors.Contact( zero(T), material, 1, "Point Contact", pp_contact_geometry ) - mantle_contact = SolidStateDetectors.Contact( operation_voltage, material, 2, "Mantle Contact", mantle_contact_geometry ) - - semiconductor, (pp_contact, mantle_contact) + # evaluate "include" statements - needed for the charge drift model + SolidStateDetectors.scan_and_merge_included_json_files!(config_dict, "") - passives = missing # possible holding structure around the detector - virtual_drift_volumes = missing - SolidStateDetector{T}( meta.name, semiconductor, [pp_contact, mantle_contact], passives, virtual_drift_volumes ) + return config_dict end end # module LegendDataManagementSolidStateDetectorsExt diff --git a/src/LegendDataManagement.jl b/src/LegendDataManagement.jl index 0b179470..8b9fc6c0 100644 --- a/src/LegendDataManagement.jl +++ b/src/LegendDataManagement.jl @@ -18,14 +18,14 @@ using PropertyDicts using StructArrays using Unitful using Measurements -using Measurements: ± +using Measurements: ±, value, uncertainty using Printf: @printf using IntervalSets: AbstractInterval, ClosedInterval, leftendpoint, rightendpoint using LRUCache: LRU using ProgressMeter: @showprogress -using PropertyFunctions: PropertyFunction, @pf, filterby, props2varsyms +using PropertyFunctions: PropertyFunction, @pf, filterby, props2varsyms, PropSelFunction using StaticStrings: StaticString import Tables using Tables: columns @@ -36,7 +36,6 @@ using MIMEs: mime_from_extension include("legend_report.jl") include("status_types.jl") include("atomic_fcreate.jl") -include("calfunc.jl") include("filekey.jl") include("dataset.jl") include("data_config.jl") 
@@ -49,6 +48,8 @@ include("dataprod_config.jl") include("calibration_functions.jl") include("evt_functions.jl") include("lprops.jl") +include("data_io.jl") +include("active_volume.jl") include("utils/utils.jl") end # module \ No newline at end of file diff --git a/src/active_volume.jl b/src/active_volume.jl new file mode 100644 index 00000000..56dfc491 --- /dev/null +++ b/src/active_volume.jl @@ -0,0 +1,183 @@ +# This file is a part of LegendDataManagement.jl, licensed under the MIT License (MIT). + +# These are functions needed to calculate the active volume based on +# LEGEND detector metadata and measured dead layer thicknesses + + +@inline get_inner_taper_volume(x, y, h, r) = π * (r^2 * y / 3 - (r - x)^2 * (y + 2h) / 3) +@inline get_outer_taper_volume(x, y, h, r) = π * (r^2 * h - (r - x)^2 * h) - get_inner_taper_volume(x, y, h, r) + +function get_extra_volume(geometry::PropDict, ::Val{:crack}, fccd::T) where {T <: AbstractFloat} + # Find a picture of the definition of crack here: + # https://github.com/legend-exp/legend-metadata/blob/archived/hardware/detectors/detector-metadata_5.pdf + r = geometry.radius_in_mm - fccd + H = geometry.height_in_mm - 2*fccd + alpha = geometry.extra.crack.angle_in_deg + p0 = geometry.extra.crack.radius_in_mm + fccd * (secd(alpha) - tand(alpha) - 1) + return if iszero(alpha) + # Vertical crack + (r^2 * acos(1 - p0/r) - sqrt(2r*p0 - p0^2) * (r - p0)) * H + else + # Inclined crack + t = max(p0 - H * tand(alpha), p0 * 0) + int11 = (1 - t/r) * acos(1 - t/r) - sqrt(1 - (1 - t/r)^2) + int12 = (1 - p0/r) * acos(1 - p0/r) - sqrt(1 - (1 - p0/r)^2) + -cotd(alpha) * (r^3 * (int12 - int11) + ((2*r*p0 - p0^2)^(3/2) - (2*r*t - t^2)^(3/2))/ 3) + end +end + +function get_extra_volume(geometry::PropDict, ::Val{:topgroove}, fccd::AbstractFloat) + # Find a picture of the definition of topgroove here: + # https://github.com/legend-exp/legend-metadata/blob/archived/hardware/detectors/detector-metadata_4.pdf + rb = geometry.borehole.radius_in_mm + db = geometry.borehole.depth_in_mm + rg = geometry.extra.topgroove.radius_in_mm + dg = geometry.extra.topgroove.depth_in_mm + db <= dg && @warn "The depth of the borehole ($(db)mm) should be bigger than the depth of the topgroove ($(dg)mm)." 
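+    # extra volume excluded for the topgroove: an annulus between (rg + fccd) and (rb + fccd) over the topgroove depth dg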
+ return π * ((rg + fccd)^2 - (rb + fccd)^2) * dg +end + +function get_extra_volume(geometry::PropDict, ::Val{:bottom_cylinder}, fccd::AbstractFloat) + # Find a picture of the definition of bottom_cylinder here: + # https://github.com/legend-exp/legend-metadata/blob/archived/hardware/detectors/detector-metadata_6.pdf + r = geometry.extra.bottom_cylinder.radius_in_mm - fccd + h = geometry.extra.bottom_cylinder.height_in_mm - fccd + t = geometry.extra.bottom_cylinder.transition_in_mm + R = geometry.radius_in_mm - fccd + return get_outer_taper_volume(R - r, t * R / (R - r), t, R) + π * h * (R^2 - r^2) +end + +function get_extra_volume(geometry::PropDict, fccd::T = .0) where {T <: AbstractFloat} + if isa(geometry.extra, PropDicts.MissingProperty) + return zero(T) + else + return get_extra_volume(geometry, Val(first(keys(geometry.extra))), fccd) + end +end + +# @inline get_mass(volume::U, enrichment::V) where {U <: AbstractFloat, V <: AbstractFloat} = +# volume / 1000 * (5.327 * enrichment * 76/72 + 5.327 * (1 - enrichment)) + +# Detector specific active volume calculation +function get_active_volume(pd::PropDict, ::Val{:bege}, fccd::T = .0) where {T <: AbstractFloat} + + g = pd.geometry + + R = g.radius_in_mm - fccd + H = g.height_in_mm - 2 * fccd + + # Groove + groove_volume = π * g.groove.depth_in_mm * (g.groove.radius_in_mm.outer^2 - g.groove.radius_in_mm.inner^2) + + # Top taper + h = g.taper.top.height_in_mm + α = g.taper.top.angle_in_deg / 360 * 2π + x = h * tan(α) + taper_top_volume = iszero(x) ? zero(T) : get_outer_taper_volume(x, h * R / x, h, R) + + # Bottom taper + h = g.taper.bottom.height_in_mm + α = g.taper.bottom.angle_in_deg / 360 * 2π + x = h * tan(α) + taper_bottom_volume = iszero(x) ? zero(T) : get_outer_taper_volume(x, h * R / x, h, R) + + return (π * R^2 * H - (groove_volume + taper_top_volume + taper_bottom_volume) + + π * g.groove.radius_in_mm.outer^2 * fccd - get_extra_volume(g, fccd)) * 1e-3u"cm^3" +end + +function get_active_volume(pd::PropDict, ::Val{:icpc}, fccd::T = .0) where {T <: AbstractFloat} + + g = pd.geometry + + R = g.radius_in_mm - fccd + H = g.height_in_mm - 2 * fccd + + # Borehole + rb = g.borehole.radius_in_mm + fccd + db = g.borehole.depth_in_mm + borehole_volume = π * rb^2 * db + + # Groove + groove_volume = π * g.groove.depth_in_mm * (g.groove.radius_in_mm.outer^2 - g.groove.radius_in_mm.inner^2) + + # Top taper + h = g.taper.top.height_in_mm + α = g.taper.top.angle_in_deg / 360 * 2π + x = h * tan(α) + taper_top_volume = iszero(x) ? zero(T) : get_outer_taper_volume(x, h * R / x, h, R) + + # Bottom taper + h = g.taper.bottom.height_in_mm + α = g.taper.bottom.angle_in_deg / 360 * 2π + x = h * tan(α) + taper_bottom_volume = iszero(x) ? zero(T) : get_outer_taper_volume(x, h * R / x, h, R) + + # Borehole taper + h = g.taper.bottom.height_in_mm + α = g.taper.bottom.angle_in_deg / 360 * 2π + x = h * tan(α) + taper_borehole_volume = iszero(x) ? 
zero(T) : get_inner_taper_volume(x, h * (rb + x) / x, h, rb + x) + + return (π * R^2 * H - (groove_volume + borehole_volume) - + taper_top_volume + taper_bottom_volume + taper_borehole_volume + + π * g.groove.radius_in_mm.outer^2 * fccd - get_extra_volume(g, fccd)) * 1e-3u"cm^3" +end + +function get_active_volume(pd::PropDict, ::Val{:coax}, fccd::T = .0) where {T <: AbstractFloat} + + g = pd.geometry + + R = g.radius_in_mm - fccd + H = g.height_in_mm - 2 * fccd + + # Borehole + rb = g.borehole.radius_in_mm + db = g.borehole.depth_in_mm + borehole_volume = π * rb^2 * db + + # Groove + groove_volume = π * g.groove.depth_in_mm * (g.groove.radius_in_mm.outer^2 - g.groove.radius_in_mm.inner^2) + + # Top taper + h = g.taper.top.height_in_mm + α = g.taper.top.angle_in_deg / 360 * 2π + x = h * tan(α) + taper_top_volume = iszero(x) ? zero(T) : get_outer_taper_volume(x, h * R / x, h, R) + + # Bottom taper + h = g.taper.bottom.height_in_mm + α = g.taper.bottom.angle_in_deg / 360 * 2π + x = h * tan(α) + taper_bottom_volume = iszero(x) ? zero(T) : get_outer_taper_volume(x, h * R / x, h, R) + + return (π * R^2 * H - (taper_top_volume + taper_bottom_volume + groove_volume + borehole_volume) + + π * g.groove.radius_in_mm.outer^2 * fccd - get_extra_volume(g, fccd)) * 1e-3u"cm^3" +end + + +function get_active_volume(pd::PropDict, ::Val{:ppc}, fccd::T = .0) where {T <: AbstractFloat} + + g = pd.geometry + + R = g.radius_in_mm - fccd + H = g.height_in_mm - 2 * fccd + + # Top taper + h = g.taper.top.height_in_mm + α = g.taper.top.angle_in_deg / 360 * 2π + x = h * tan(α) + taper_top_volume = iszero(x) ? zero(T) : get_outer_taper_volume(x, h * R / x, h, R) + + # Bottom taper + h = g.taper.bottom.height_in_mm + α = g.taper.bottom.angle_in_deg / 360 * 2π + x = h * tan(α) + taper_bottom_volume = iszero(x) ? zero(T) : get_outer_taper_volume(x, h * R / x, h, R) + + return (π * R^2 * H - (taper_top_volume + taper_bottom_volume) + + π * g.pp_contact.radius_in_mm^2 * fccd - get_extra_volume(g, fccd)) * 1e-3u"cm^3" +end + +function get_active_volume(pd::PropDict, fccd::AbstractFloat) + get_active_volume(pd, Val(Symbol(pd.type)), fccd) +end diff --git a/src/calfunc.jl b/src/calfunc.jl deleted file mode 100644 index 8ab66c68..00000000 --- a/src/calfunc.jl +++ /dev/null @@ -1,49 +0,0 @@ -# This file is a part of LegendDataManagement.jl, licensed under the MIT License (MIT). - - -export PolCalFunc - -struct PolCalFunc{N,T<:Number} <:Function - params::NTuple{N,T} - - PolCalFunc(params::T...) where T = new{length(params),T}(params) -end - - -function (f::PolCalFunc{N,T})(x::U) where {N,T,U} - R = promote_type(T, U) - y = zero(R) - xn = one(U) - @inbounds for p in f.params - y = R(fma(p, xn, y)) - xn *= x - end - y -end - - -function Base.convert(::Type{PolCalFunc{N,T}}, p::PropDict) where {N,T} - funcstr = p.func - if !(funcstr in ("pol1", "[0]+[1]*x", "pol2")) - error("Unsupported function \"$funcstr\" for $(PolCalFunc{N,T})") - end - - coeffs = zeros(T, N) - for (i,v) in p.params - coeffs[i + 1] = v - end - PolCalFunc(coeffs...) 
-end - - -function Base.convert(::Type{Dict{Int, PolCalFunc{N,T}}}, p::PropDict) where {N,T} - calfuncs = Dict{Int, PolCalFunc{N,T}}() - for (ch, caldict) in p - calfuncs[ch] = convert(PolCalFunc{2,Float64}, caldict) - end - calfuncs -end - - -const CalFuncDict = Dict{Int,PolCalFunc{2,Float64}} -export CalFuncDict diff --git a/src/calibration_functions.jl b/src/calibration_functions.jl index e420eed2..815f2983 100644 --- a/src/calibration_functions.jl +++ b/src/calibration_functions.jl @@ -3,6 +3,8 @@ _get_cal_values(pd::PropsDB, sel::AnyValiditySelection) = get_values(pd(sel)) _get_cal_values(pd::NoSuchPropsDBEntry, sel::AnyValiditySelection) = PropDicts.PropDict() + +### HPGe calibration functions const _cached_get_ecal_props = LRU{Tuple{UInt, AnyValiditySelection}, Union{PropDict,PropDicts.MissingProperty}}(maxsize = 10^3) function _get_ecal_props(data::LegendData, sel::AnyValiditySelection) @@ -17,28 +19,10 @@ function _get_ecal_props(data::LegendData, sel::AnyValiditySelection, detector:: end function _get_e_cal_propsfunc_str(data::LegendData, sel::AnyValiditySelection, detector::DetectorId, e_filter::Symbol) - ecal_props::String = get(get(get(_get_ecal_props(data, sel, detector), e_filter, PropDict()), :cal, PropDict()), :func, "0keV / 0") + ecal_props::String = get(get(get(_get_ecal_props(data, sel, detector), e_filter, PropDict()), :cal, PropDict()), :func, "e_max * NaN*keV") return ecal_props end -const _cached_get_aoecal_props = LRU{Tuple{UInt, AnyValiditySelection}, Union{PropDict,PropDicts.MissingProperty}}(maxsize = 10^3) - -function _get_aoecal_props(data::LegendData, sel::AnyValiditySelection) - key = (objectid(data), sel) - get!(_cached_get_aoecal_props, key) do - _get_cal_values(dataprod_parameters(data).ppars.aoe, sel) - end -end - -function _get_aoecal_props(data::LegendData, sel::AnyValiditySelection, detector::DetectorId) - get(_get_aoecal_props(data, sel), Symbol(detector), PropDict()) -end - -function _get_aoe_cal_propfunc_str(data::LegendData, sel::AnyValiditySelection, detector::DetectorId) - psdcal_props::String = get(_get_aoecal_props(data, sel, detector), :func, "0 / 0") - return psdcal_props -end - const _cached_dataprod_ged_cal = LRU{Tuple{UInt, AnyValiditySelection}, Union{PropDict,PropDicts.MissingProperty}}(maxsize = 10^3) function _dataprod_ged_cal(data::LegendData, sel::AnyValiditySelection) @@ -65,14 +49,71 @@ function get_ged_cal_propfunc(data::LegendData, sel::AnyValiditySelection, detec let energies = Symbol.(_dataprod_ged_cal(data, sel, detector).energy_types), energies_cal = Symbol.(_dataprod_ged_cal(data, sel, detector).energy_types .* "_cal") ljl_propfunc(Dict{Symbol, String}( - append!(energies_cal, [:aoe_classifier]) .=> - append!(_get_e_cal_propsfunc_str.(Ref(data), Ref(sel), Ref(detector), energies), [_get_aoe_cal_propfunc_str(data, sel, detector)]) + energies_cal .=> _get_e_cal_propsfunc_str.(Ref(data), Ref(sel), Ref(detector), energies) )) end end export get_ged_cal_propfunc + +### HPGe PSD calibration functions +const _cached_get_aoecal_props = LRU{Tuple{UInt, AnyValiditySelection, Symbol, Symbol}, Union{PropDict,PropDicts.MissingProperty}}(maxsize = 10^3) + +function _get_aoecal_props(data::LegendData, sel::AnyValiditySelection; pars_type::Symbol=:ppars, pars_cat::Symbol=:aoe) + key = (objectid(data), sel, pars_type, pars_cat) + get!(_cached_get_aoecal_props, key) do + _get_cal_values(dataprod_parameters(data)[pars_type][pars_cat], sel) + end +end + +function _get_aoecal_props(data::LegendData, sel::AnyValiditySelection, 
detector::DetectorId; pars_type::Symbol=:ppars, pars_cat::Symbol=:aoe) + get(_get_aoecal_props(data, sel; pars_type=pars_type, pars_cat=pars_cat), Symbol(detector), PropDict()) +end + +function _get_aoe_cal_propfunc_str(data::LegendData, sel::AnyValiditySelection, detector::DetectorId, aoe_type::Symbol; pars_type::Symbol=:ppars, pars_cat::Symbol=:aoe) + aoecal_props::String = get(_get_aoecal_props(data, sel, detector; pars_type=pars_type, pars_cat=pars_cat)[aoe_type], :func, "a_raw * NaN") + return aoecal_props +end + +const _cached_dataprod_psd = LRU{Tuple{UInt, AnyValiditySelection}, Union{PropDict,PropDicts.MissingProperty}}(maxsize = 10^3) + +function _dataprod_psd(data::LegendData, sel::AnyValiditySelection) + key = (objectid(data), sel) + get!(_cached_dataprod_psd, key) do + dataprod_config(data).psd(sel) + end +end + +function _dataprod_aoe(data::LegendData, sel::AnyValiditySelection, detector::DetectorId; pars_type::Symbol=:ppars) + dataprod_aoe_config = _dataprod_psd(data, sel).aoe + @assert pars_type in (:ppars, :rpars) "pars_type must be either :ppars or :rpars" + merge(dataprod_aoe_config[ifelse(pars_type == :ppars, :p_default, :default)], get(ifelse(pars_type == :ppars, get(dataprod_aoe_config, :p, PropDict()), dataprod_aoe_config), detector, PropDict())) +end + +""" + get_ged_psd_propfunc(data::LegendData, sel::AnyValiditySelection, detector::DetectorId; pars_type::Symbol=:ppars, pars_cat::Symbol=:aoe) + +Get the HPGe psd calibration function for the given data, validity selection and +detector. + +Note: Caches configuration/calibration data internally, use a fresh `data` +object if on-disk configuration/calibration data may have changed. +""" +function get_ged_psd_propfunc(data::LegendData, sel::AnyValiditySelection, detector::DetectorId; pars_type::Symbol=:ppars, pars_cat::Symbol=:aoe) + let aoe_types = collect(keys(_dataprod_aoe(data, sel, detector; pars_type=pars_type).aoe_funcs)), aoe_classifier = Symbol.(string.(keys(_dataprod_aoe(data, sel, detector; pars_type=pars_type).aoe_funcs)) .* "_classifier") + + ljl_propfunc(Dict{Symbol, String}( + aoe_classifier .=> _get_aoe_cal_propfunc_str.(Ref(data), Ref(sel), Ref(detector), aoe_types; pars_type=pars_type, pars_cat=pars_cat) + )) + end +end +export get_ged_psd_propfunc + + + +### HPGe QC calibration functions + const _cached_dataprod_qc = LRU{Tuple{UInt, AnyValiditySelection}, Union{PropDict,PropDicts.MissingProperty}}(maxsize = 10^3) function _dataprod_qc(data::LegendData, sel::AnyValiditySelection) @@ -152,33 +193,57 @@ end export get_ged_qc_is_baseline_propfunc -const _cached_dataprod_pars_p_psd = LRU{Tuple{UInt, AnyValiditySelection}, Union{PropDict,PropDicts.MissingProperty}}(maxsize = 10^3) -function _dataprod_pars_p_psd(data::LegendData, sel::AnyValiditySelection) - data_id = objectid(data) - key = (objectid(data), sel) - get!(_cached_dataprod_pars_p_psd, key) do - _get_cal_values(dataprod_parameters(data).ppars.aoe, sel) - end -end -function _dataprod_pars_p_psd(data::LegendData, sel::AnyValiditySelection, detector::DetectorId) - get(_dataprod_pars_p_psd(data, sel), Symbol(detector), PropDict()) -end +### HPGe cut functions """ LegendDataManagement.dataprod_pars_aoe_window(data::LegendData, sel::AnyValiditySelection, detector::DetectorId) Get the A/E cut window for the given data, validity selection and detector. 
""" -function dataprod_pars_aoe_window(data::LegendData, sel::AnyValiditySelection, detector::DetectorId) - aoecut_lo::Float64 = get(_dataprod_pars_p_psd(data, sel, detector).cut, :lowcut, NaN) - aoecut_hi::Float64 = get(_dataprod_pars_p_psd(data, sel, detector).cut, :highcut, NaN) +function dataprod_pars_aoe_window(data::LegendData, sel::AnyValiditySelection, detector::DetectorId, aoe_classifier::Symbol; pars_type::Symbol=:ppars, pars_cat::Symbol=:aoe) + aoecut_lo::Float64 = get(_get_aoecal_props(data, sel, detector; pars_type=pars_type, pars_cat=pars_cat)[aoe_classifier], :lowcut, -Inf) + aoecut_hi::Float64 = get(_get_aoecal_props(data, sel, detector; pars_type=pars_type, pars_cat=pars_cat)[aoe_classifier], :highcut, Inf) ClosedInterval(aoecut_lo, aoecut_hi) end export dataprod_pars_aoe_window +function _get_ged_aoe_lowcut_propfunc_str(data::LegendData, sel::AnyValiditySelection, detector::DetectorId, aoe_classifier::Symbol; pars_type::Symbol=:ppars, pars_cat::Symbol=:aoe) + "$(aoe_classifier) > $(leftendpoint(dataprod_pars_aoe_window(data, sel, detector, aoe_classifier; pars_type=pars_type, pars_cat=pars_cat)))" +end + +function _get_ged_aoe_dscut_propfunc_str(data::LegendData, sel::AnyValiditySelection, detector::DetectorId, aoe_classifier::Symbol; pars_type::Symbol=:ppars, pars_cat::Symbol=:aoe) + "$(aoe_classifier) > $(leftendpoint(dataprod_pars_aoe_window(data, sel, detector, aoe_classifier; pars_type=pars_type, pars_cat=pars_cat))) && $(aoe_classifier) < $(rightendpoint(dataprod_pars_aoe_window(data, sel, detector, aoe_classifier; pars_type=pars_type, pars_cat=pars_cat)))" +end + +""" + LegendDataManagement.get_ged_aoe_cut_propfunc(data::LegendData, sel::AnyValiditySelection, detector::DetectorId; pars_type::Symbol=:ppars, pars_cat::Symbol=:aoe) + +Get the A/E cut propfuncs for the given data, validity selection and detector. +""" +function get_ged_aoe_cut_propfunc(data::LegendData, sel::AnyValiditySelection, detector::DetectorId; pars_type::Symbol=:ppars, pars_cat::Symbol=:aoe) + let aoe_classifiers = Symbol.(_dataprod_aoe(data, sel, detector; pars_type=pars_type).aoe_classifiers), aeo_low_cut = Symbol.(_dataprod_aoe(data, sel, detector; pars_type=pars_type).aoe_classifiers .* "_low_cut"), + aoe_ds_cut = Symbol.(_dataprod_aoe(data, sel, detector; pars_type=pars_type).aoe_classifiers .* "_ds_cut") + + ljl_propfunc( + merge( + Dict{Symbol, String}( + aeo_low_cut .=> _get_ged_aoe_lowcut_propfunc_str.(Ref(data), Ref(sel), Ref(detector), aoe_classifiers; pars_type=pars_type, pars_cat=pars_cat) + ), + Dict{Symbol, String}( + aoe_ds_cut .=> _get_ged_aoe_dscut_propfunc_str.(Ref(data), Ref(sel), Ref(detector), aoe_classifiers; pars_type=pars_type, pars_cat=pars_cat) + ) + ) + ) + end +end +export get_ged_aoe_cut_propfunc + + + +### SiPM LAr cut functions const _cached_get_larcal_props = LRU{Tuple{UInt, AnyValiditySelection}, Union{PropDict,PropDicts.MissingProperty}}(maxsize = 10^3) diff --git a/src/data_io.jl b/src/data_io.jl new file mode 100644 index 00000000..95b78055 --- /dev/null +++ b/src/data_io.jl @@ -0,0 +1,46 @@ +# This file is a part of jl, licensed under the MIT License (MIT). + +""" + read_ldata(data::LegendData, selectors...; kwargs...) + read_ldata(f, data::LegendData, selectors...; kwargs...) + read_ldata(columns::NTuple{<:Any, Symbol}, data::LegendData, selectors::Tuple; kwargs...) + read_ldata(column::Symbol, data::LegendData, selectors::Tuple; kwargs...) + +Read `lh5` data from disk for a given set of `selectors`. 
After reading in, a PropertyFunction `f` can be applied to the data. +If a tuple of `Symbol`s is given, the properties from the tuple are selected. If the `n_evts` kwarg is provided, a random selection with `n_evts` number of +events per file is performed. `ch` can be either a `ChannelId` or a `DetectorId`. +# Examples +```julia +dsp = read_ldata(l200, :jldsp, filekey, ch) +dsp = read_ldata((:e_cusp, :e_trap, :blmean), l200, :jldsp, filekey, ch) +dsp = read_ldata(:e_cusp, l200, :jldsp, filekey, ch) +dsp = read_ldata(l200, :jldsp, :cal, :p03, :r000, ch) + +dsp = read_ldata(l200, :jldsp, :cal, DataPartition(1), ch) +dsp = read_ldata(l200, :jldsp, :cal, DataPeriod(3), ch) +dsp = read_ldata(l200, :jldsp, :cal, runinfo(l200)[1:3], ch) + +dsp = read_ldata(l200, :jldsp, filekey, ch; n_evts=1000) +``` +""" +function read_ldata end +export read_ldata + +read_ldata(data::LegendData, selectors...; kwargs...) = read_ldata(identity, data, selectors; kwargs...) + +read_ldata(f, data::LegendData, selectors...; kwargs...) = read_ldata(f, data, selectors; kwargs...) + +read_ldata(data::LegendData, selectors::Tuple; kwargs...) = read_ldata(identity, data, selectors; kwargs...) + +read_ldata(columns::NTuple{<:Any, Symbol}, data::LegendData, selectors::Tuple; kwargs...) = read_ldata(PropSelFunction{columns}(), data, selectors; kwargs...) + +read_ldata(column::Symbol, data::LegendData, selectors::Tuple; kwargs...) = read_ldata((column, ), data, selectors; kwargs...) + +_lh5_ext_loaded(::Val) = false + +function read_ldata(f::Base.Callable, data::LegendData, selectors::Tuple; kwargs...) + if !_lh5_ext_loaded(Val(true)) + throw(ErrorException("read_ldata requires LegendHDF5IO.jl to be loaded, e.g. via `using LegendHDF5IO`")) + end + throw(ArgumentError("read_ldata doesn't support argument types $(typeof.((f, data, selectors))) with keyword arguments $(keys(kwargs))")) +end \ No newline at end of file diff --git a/src/dataprod_config.jl b/src/dataprod_config.jl index a468547c..7cfdf31d 100644 --- a/src/dataprod_config.jl +++ b/src/dataprod_config.jl @@ -145,16 +145,34 @@ function _resolve_partition_runs(data::LegendData, period::DataPeriod, runs::Abs end """ - partitioninfo(data::LegendData, s::ChannelIdLike) + partitioninfo(data::LegendData, ch::ChannelId) + partitioninfo(data::LegendData, ch::ChannelId, part::DataPartitionLike) + partitioninfo(data::LegendData, ch::ChannelId, period::DataPeriodLike) Return cross-period data partitions. """ -function partitioninfo(data::LegendData, ch::ChannelIdLike) +function partitioninfo end +export partitioninfo +function partitioninfo(data::LegendData, ch::ChannelId) _get_partitions(data, Symbol(ChannelId(ch))) end +function partitioninfo(data::LegendData, det::DetectorIdLike) + ch = channelinfo(data, first(filter(!ismissing, runinfo(data).cal.startkey)), det).channel + partitioninfo(data, ch) +end partitioninfo(data, ch, part::DataPartition) = partitioninfo(data, ch)[part] partitioninfo(data, ch, period::DataPeriod) = sort(Vector{DataPartition}([p for (p, pinfo) in partitioninfo(data, ch) if period in pinfo.period])) -export partitioninfo +function partitioninfo(data, ch, p::Union{Symbol, AbstractString}) + if _can_convert_to(DataPartition, p) + partitioninfo(data, ch, DataPartition(p)) + elseif _can_convert_to(DataPeriod, p) + partitioninfo(data, ch, DataPeriod(p)) + else + throw(ArgumentError("Invalid specification \"$p\". 
Must be of type DataPartition or DataPeriod")) + end +end +partitioninfo(data, ch, period::DataPeriodLike, run::DataRunLike) = sort(Vector{DataPartition}([p for (p, pinfo) in partitioninfo(data, ch) if any(map(row -> row.period == DataPeriod(period) && row.run == DataRun(run), pinfo))])) + Base.Broadcast.broadcasted(f::typeof(partitioninfo), data::LegendData, ch::ChannelId, p::Vector{<:DataPeriod}) = unique(vcat(f.(Ref(data), Ref(ch), p)...)) @@ -244,8 +262,8 @@ function runinfo(data::LegendData) @NamedTuple{period::DataPeriod, run::DataRun, is_analysis_cal_run::Bool, is_analysis_phy_run::Bool, cal::nttype, phy::nttype, fft::nttype}((period, run, Bool(is_ana_cal_run), Bool(is_ana_phy_run), get_cat_entry(:cal), get_cat_entry(:phy), get_cat_entry(:fft))) end periods_and_runs = [[make_row(p, r, ri) for (r, ri) in rs] for (p, rs) in rinfo] - flat_pr = vcat(periods_and_runs...)::Vector{@NamedTuple{period::DataPeriod, run::DataRun, is_analysis_cal_run::Bool, is_analysis_phy_run::Bool, cal::nttype, phy::nttype, fft::nttype}} - Table(sort(StructArray(flat_pr))) + flat_pr = sort(StructArray(vcat(periods_and_runs...)::Vector{@NamedTuple{period::DataPeriod, run::DataRun, is_analysis_cal_run::Bool, is_analysis_phy_run::Bool, cal::nttype, phy::nttype, fft::nttype}})) + Table(merge(columns(flat_pr), (cal = Table(StructArray(flat_pr.cal)), phy = Table(StructArray(flat_pr.phy)), fft = Table(StructArray(flat_pr.fft))))) end end export runinfo @@ -269,15 +287,18 @@ function runinfo(data::LegendData, runsel::RunCategorySelLike) getproperty(runinfo(data, (period, run)), Symbol(category)) end runinfo(data, fk::FileKey) = runinfo(data, (fk.period, fk.run, fk.category)) - +runinfo(data, selectors...) = runinfo(data, selectors) """ start_filekey(data::LegendData, runsel::RunCategorySelLike) Get the starting filekey for `data` in `period`, `run`, `category`. """ -start_filekey(data::LegendData, runsel::RunCategorySelLike) = runinfo(data, runsel).startkey +function start_filekey end export start_filekey +start_filekey(data::LegendData, runsel::RunCategorySelLike) = runinfo(data, runsel).startkey +start_filekey(data::LegendData, fk::FileKey) = start_filekey(data, (fk.period, fk.run, fk.category)) +start_filekey(data::LegendData, selectors...) = start_filekey(data, selectors) """ @@ -285,9 +306,10 @@ export start_filekey Get the livetime for `data` in physics data taking of `run` in `period`. """ -livetime(data::LegendData, runsel::RunCategorySelLike) = runinfo(data, runsel).livetime +function livetime end export livetime - +livetime(data::LegendData, runsel::RunCategorySelLike) = runinfo(data, runsel).livetime +livetime(data, selectors...) = livetime(data, selectors) """ is_lrun(data::LegendData, runsel::RunSelLike) @@ -303,6 +325,8 @@ function is_lrun(data::LegendData, runsel::RunCategorySelLike)::Bool false end end +is_lrun(data::LegendData, fk::FileKey) = is_lrun(data, (fk.period, fk.run, fk.category)) +is_lrun(data::LegendData, selectors...) = is_lrun(data, selectors) export is_lrun """ @@ -326,6 +350,8 @@ is_analysis_cal_run(data::LegendData, runsel::RunSelLike) = runinfo(data, runsel Return `true` if `run` is an `cat` analysis run for `data` in `period`. 
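For example (sketch only; assumes a configured `LegendData` object and placeholder period/run names):

```julia
l200 = LegendData(:l200)
is_analysis_run(l200, (DataPeriod(3), DataRun(0), :cal))       # run-selection tuple
is_analysis_run(l200, :p03, :r000, :phy)                       # splatted selectors, forwarded as a tuple
is_analysis_run(l200, start_filekey(l200, :p03, :r000, :cal))  # FileKey method
```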
""" +function is_analysis_run end +export is_analysis_run function is_analysis_run(data::LegendData, runsel::RunCategorySelLike) # first check if it is a legend run at all if !is_lrun(data, runsel) @@ -342,9 +368,10 @@ function is_analysis_run(data::LegendData, runsel::RunCategorySelLike) throw(ArgumentError("Invalid category $(runs.category) for analysis run")) end end -export is_analysis_run +is_analysis_run(data::LegendData, fk::FileKey) = is_analysis_run(data, (fk.period, fk.run, fk.category)) +is_analysis_run(data::LegendData, selectors...) = is_analysis_run(data, selectors) -const _cached_bad_filekeys = LRU{UInt, Set{FileKey}}(maxsize = 10) +const _cached_bad_filekeys = LRU{UInt, Set{FileKey}}(maxsize = 10^3) """ bad_filekeys(data::LegendData) diff --git a/src/evt_functions.jl b/src/evt_functions.jl index 827f83df..4900d41f 100644 --- a/src/evt_functions.jl +++ b/src/evt_functions.jl @@ -9,41 +9,129 @@ function _dataprod_evt(data::LegendData, sel::AnyValiditySelection) end end -function _dataprod_evt(data::LegendData, sel::AnyValiditySelection, detector::DetectorId) - get(_dataprod_evt(data, sel), detector, _dataprod_evt(data, sel).default) +function _dataprod_evt(data::LegendData, sel::AnyValiditySelection, system::Symbol) + _dataprod_evt(data, sel)[system] end -const _cached_dataprod_evt_chdata_pf = LRU{Tuple{UInt, AnyValiditySelection, DetectorId}, PropertyFunction}(maxsize = 10^2) +### HPGe """ - get_ged_evt_chdata_propfunc(data::LegendData, sel::AnyValiditySelection, detector::DetectorId) + get_ged_evt_chdata_propfunc(data::LegendData, sel::AnyValiditySelection) Get the Ge-detector channel data output PropertyFunction. """ -function get_ged_evt_chdata_propfunc(data::LegendData, sel::AnyValiditySelection, detector::DetectorId) - key = (objectid(data), sel, detector) - get!(_cached_dataprod_evt_chdata_pf, key) do - chdata_def_props = _dataprod_evt(data, sel, detector).chdata_output - return ljl_propfunc(chdata_def_props) - end +function get_ged_evt_chdata_propfunc(data::LegendData, sel::AnyValiditySelection) + ljl_propfunc(_dataprod_evt(data, sel, :geds).chdata_output) end export get_ged_evt_chdata_propfunc +""" + get_ged_evt_chsel_propfunc(data::LegendData, sel::AnyValiditySelection) + +Get the Ge-detector channel selection PropertyFunction. +""" +function get_ged_evt_chsel_propfunc(data::LegendData, sel::AnyValiditySelection) + ljl_propfunc(_dataprod_evt(data, sel, :geds).channels) +end +export get_ged_evt_chsel_propfunc + +""" + get_ged_evt_hitchsel_propfunc(data::LegendData, sel::AnyValiditySelection) + +Get the hit Ge-detector channel selection PropertyFunction. +""" +function get_ged_evt_hitchsel_propfunc(data::LegendData, sel::AnyValiditySelection) + ljl_propfunc(_dataprod_evt(data, sel, :geds).hitchannels) +end +export get_ged_evt_hitchsel_propfunc + + +""" + get_ged_evt_kwargs(data::LegendData, sel::AnyValiditySelection) + +Get the Ge-detector evt kwargs. 
+""" +function get_ged_evt_kwargs(data::LegendData, sel::AnyValiditySelection) + kwargs = _dataprod_evt(data, sel, :geds).kwargs + NamedTuple([(k, if v isa String Symbol(v) else v end) for (k, v) in pairs(kwargs)]) +end +export get_ged_evt_kwargs + + + +### aux + const _cached_dataprod_evt_puls_pf = LRU{Tuple{UInt, AnyValiditySelection, DetectorId}, PropertyFunction}(maxsize = 10^2) """ - get_pulser_cal_propfunc(data::LegendData, sel::AnyValiditySelection, detector::DetectorId) + get_aux_cal_propfunc(data::LegendData, sel::AnyValiditySelection, detector::DetectorId) -Get the pulser calibration function for the given data, validity selection -and the pulser channel referred to by `detector`. +Get the aux calibration function for the given data, validity selection +and the aux channel referred to by `detector`. """ -function get_pulser_cal_propfunc(data::LegendData, sel::AnyValiditySelection, detector::DetectorId) +function get_aux_cal_propfunc(data::LegendData, sel::AnyValiditySelection, detector::DetectorId) key = (objectid(data), sel, detector) get!(_cached_dataprod_evt_puls_pf, key) do - cal_def_props = _dataprod_evt(data, sel, detector).cal - return ljl_propfunc(cal_def_props) + ljl_propfunc(_dataprod_evt(data, sel, :aux)[detector].cal) end end -export get_pulser_cal_propfunc \ No newline at end of file +export get_aux_cal_propfunc + +""" + get_aux_evt_chdata_propfunc(data::LegendData, sel::AnyValiditySelection, detector::DetectorId) + +Get the aux channel data output PropertyFunction for the aux channel referred to by `detector`. +""" +function get_aux_evt_chdata_propfunc(data::LegendData, sel::AnyValiditySelection, detector::DetectorId) + ljl_propfunc(_dataprod_evt(data, sel, :aux)[detector].chdata_output) +end +export get_aux_evt_chdata_propfunc + +""" + get_aux_evt_levelname_propfunc(data::LegendData, sel::AnyValiditySelection, detector::DetectorId) + +Get the aux event level name for the aux channel referred to by `detector`. +""" +function get_aux_evt_levelname_propfunc(data::LegendData, sel::AnyValiditySelection, detector::DetectorId) + Symbol(_dataprod_evt(data, sel, :aux)[detector].evt_levelname) +end +get_aux_evt_levelname_propfunc(data::LegendData, sel::AnyValiditySelection, channel::ChannelId) = get_aux_evt_levelname_propfunc(data, sel, channelinfo(data, sel, channel).detector) +export get_aux_evt_levelname_propfunc + + +""" + get_aux_evt_chsel_propfunc(data::LegendData, sel::AnyValiditySelection) + +Get the aux channel selection PropertyFunction. +""" +function get_aux_evt_chsel_propfunc(data::LegendData, sel::AnyValiditySelection) + ljl_propfunc(_dataprod_evt(data, sel, :aux).channels) +end +export get_aux_evt_chsel_propfunc + + + +### SPMS + +""" + get_spms_evt_chdata_propfunc(data::LegendData, sel::AnyValiditySelection) + +Get the SiPM channel data output PropertyFunction. +""" +function get_spms_evt_chdata_propfunc(data::LegendData, sel::AnyValiditySelection) + ljl_propfunc(_dataprod_evt(data, sel, :spms).chdata_output) +end +export get_spms_evt_chdata_propfunc + +""" + get_spms_evt_chsel_propfunc(data::LegendData, sel::AnyValiditySelection) + +Get the SiPM channel selection PropertyFunction.
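For example (sketch only; `evt_tbl` stands for an event-tier `Table` loaded elsewhere, e.g. via `read_ldata`):

```julia
l200 = LegendData(:l200)
fk = start_filekey(l200, :p03, :r000, :phy)
chsel_pf = get_spms_evt_chsel_propfunc(l200, fk)
chsel_pf.(evt_tbl)   # PropertyFunctions are applied row-wise by broadcasting over a table
```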
+""" +function get_spms_evt_chsel_propfunc(data::LegendData, sel::AnyValiditySelection) + ljl_propfunc(_dataprod_evt(data, sel, :spms).channels) +end +export get_spms_evt_chsel_propfunc + diff --git a/src/filekey.jl b/src/filekey.jl index 04a359c9..9c086e93 100644 --- a/src/filekey.jl +++ b/src/filekey.jl @@ -230,7 +230,7 @@ _can_convert_to(::Type{DataPeriod}, s) = false function DataPeriod(s::AbstractString) m = match(period_expr, s) if (m == nothing) - throw(ArgumentError("String \"$s\" does not look like a valid file LEGEND data-run name")) + throw(ArgumentError("String \"$s\" does not look like a valid file LEGEND data-period name")) else DataPeriod(parse(Int, (m::RegexMatch).captures[1])) end @@ -337,7 +337,7 @@ export DataCategory Base.:(==)(a::DataCategory, b::DataCategory) = a.label == b.label Base.isless(a::DataCategory, b::DataCategory) = isless(a.label, b.label) -const category_expr = r"^([a-z]+)$" +const category_expr = r"^[a-z]{3}$" _can_convert_to(::Type{DataCategory}, s::AbstractString) = !isnothing(match(category_expr, s)) _can_convert_to(::Type{DataCategory}, s::Symbol) = _can_convert_to(DataCategory, string(s)) @@ -649,7 +649,7 @@ end const ch_expr = r"^ch([0-9]{3}|(?:0[1-9]|[1-9][0-9])[0-9]{5})$" _can_convert_to(::Type{ChannelId}, s::AbstractString) = !isnothing(match(ch_expr, s)) -_can_convert_to(::Type{ChannelId}, s::Int) = _can_convert_to(ChannelId, lpad(0, no < 1000 ? 3 : 7, '0')) +_can_convert_to(::Type{ChannelId}, s::Int) = _can_convert_to(ChannelId, "ch$s") _can_convert_to(::Type{ChannelId}, s::ChannelId) = true _can_convert_to(::Type{ChannelId}, s) = false diff --git a/src/legend_data.jl b/src/legend_data.jl index 8a2f28ce..21946153 100644 --- a/src/legend_data.jl +++ b/src/legend_data.jl @@ -235,6 +235,9 @@ function search_disk(::Type{DT}, path::AbstractString) where DT<:DataSelector return unique(sort(DT.(valid_files))) end + +const _cached_channelinfo = LRU{Tuple{UInt, AnyValiditySelection}, StructVector}(maxsize = 10^3) + """ channelinfo(data::LegendData, sel::AnyValiditySelection; system::Symbol = :all, only_processable::Bool = false) channelinfo(data::LegendData, sel::RunCategorySelLike; system::Symbol = :all, only_processable::Bool = false) @@ -242,72 +245,95 @@ end Get all channel information for the given [`LegendData`](@ref) and [`ValiditySelection`](@ref). 
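For example (sketch only; `filekey` is a placeholder validity selection):

```julia
chinfo     = channelinfo(l200, filekey; system = :geds, only_processable = true)
chinfo_ext = channelinfo(l200, filekey; extended = true)
# `extended = true` adds the columns cc4, cc4ch, daqcrate, daqcard, hvcard, hvch,
# enrichment, mass, total_volume and active_volume
```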
""" -function channelinfo(data::LegendData, sel::AnyValiditySelection; system::Symbol = :all, only_processable::Bool = false) - chmap = data.metadata(sel).hardware.configuration.channelmaps - diodmap = data.metadata.hardware.detectors.germanium.diodes - dpcfg = data.metadata(sel).dataprod.config.analysis - - channel_keys = collect(keys(chmap)) - - _convert_location(l::AbstractString) = (location = Symbol(l), detstring = -1, position = -1, fiber = "") - - function _convert_location(l::PropDict) - ( - location = :array, - detstring = get(l, :string, -1), - position = _convert_pos(get(l, :position, -1)), - fiber = get(l, :fiber, ""), - ) - end +function channelinfo(data::LegendData, sel::AnyValiditySelection; system::Symbol = :all, only_processable::Bool = false, extended::Bool = false) + key = (objectid(data), sel) + chinfo = get!(_cached_channelinfo, key) do + chmap = data.metadata(sel).hardware.configuration.channelmaps + diodmap = data.metadata.hardware.detectors.germanium.diodes + dpcfg = data.metadata(sel).dataprod.config.analysis + + channel_keys = collect(keys(chmap)) + + _convert_location(l::AbstractString) = (location = Symbol(l), detstring = -1, position = -1, fiber = "") + + function _convert_location(l::PropDict) + ( + location = :array, + detstring = get(l, :string, -1), + position = _convert_pos(get(l, :position, -1)), + fiber = get(l, :fiber, ""), + ) + end - _convert_location(l::PropDicts.MissingProperty) = (location = :unknown, detstring = -1, position = -1, fiber = "") + _convert_location(l::PropDicts.MissingProperty) = (location = :unknown, detstring = -1, position = -1, fiber = "") + + _convert_pos(p::Integer) = Int(p) + function _convert_pos(p::AbstractString) + if p == "top" + 1 + elseif p == "bottom" + 0 + else + -1 + end + end - _convert_pos(p::Integer) = Int(p) - function _convert_pos(p::AbstractString) - if p == "top" - 1 - elseif p == "bottom" - 0 - else - -1 + function make_row(k::Symbol) + + fcid::Int = get(chmap[k].daq, :fcid, -1) + rawid::Int = chmap[k].daq.rawid + channel::ChannelId = ChannelId(rawid) + + detector::DetectorId = DetectorId(k) + det_type::Symbol = Symbol(ifelse(haskey(diodmap, k), diodmap[k].type, :unknown)) + local system::Symbol = Symbol(chmap[k].system) + processable::Bool = get(dpcfg[k], :processable, false) + usability::Symbol = Symbol(get(dpcfg[k], :usability, :unknown)) + is_blinded::Bool = get(dpcfg[k], :is_blinded, false) + low_aoe_status::Symbol = Symbol(get(get(get(dpcfg[k], :psd, PropDict()), :status, PropDict()), Symbol("low_aoe"), :unknown)) + high_aoe_status::Symbol = Symbol(get(get(get(dpcfg[k], :psd, PropDict()), :status, PropDict()), Symbol("high_aoe"), :unknown)) + lq_status::Symbol = Symbol(get(get(get(dpcfg[k], :psd, PropDict()), :status, PropDict()), Symbol("lq"), :unknown)) + batch5_dt_cut::Symbol = Symbol(get(get(get(dpcfg[k], :psd, PropDict()), :status, PropDict()), Symbol("batch5_dt_cut"), :unknown)) + is_bb_like::String = replace(get(get(dpcfg[k], :psd, PropDict()), :is_bb_like, ""), "&" => "&&") + + location::Symbol, detstring::Int, position::Int, fiber::StaticString{8} = _convert_location(chmap[k].location) + + c = (; + detector, channel, fcid, rawid, system, processable, usability, is_blinded, low_aoe_status, high_aoe_status, lq_status, batch5_dt_cut, is_bb_like, det_type, + location, detstring, fiber, position + ) + + if extended + cc4::StaticString{8} = get(chmap[k].electronics.cc4, :id, "") + cc4ch::Int = get(chmap[k].electronics.cc4, :channel, -1) + daqcrate::Int = get(chmap[k].daq, :crate, -1) + daqcard::Int = 
chmap[k].daq.card.id + hvcard::Int = get(chmap[k].voltage.card, :id, -1) + hvch::Int = get(chmap[k].voltage, :channel, -1) + + enrichment::Unitful.Quantity{<:Measurement{<:Float64}} = if haskey(diodmap, k) && haskey(diodmap[k].production, :enrichment) measurement(diodmap[k].production.enrichment.val, diodmap[k].production.enrichment.unc) else measurement(Float64(NaN), Float64(NaN)) end *100u"percent" + mass::Unitful.Mass{<:Float64} = if haskey(diodmap, k) && haskey(diodmap[k].production, :mass_in_g) diodmap[k].production.mass_in_g else Float64(NaN) end *1e-3*u"kg" + + total_volume::Unitful.Volume{<:Float64} = if haskey(diodmap, k) get_active_volume(diodmap[k], 0.0) else Float64(NaN) * u"cm^3" end + fccds = diodmap[k].characterization.l200_site.fccd_in_mm + fccd::Float64 = if isa(fccds, NoSuchPropsDBEntry) || + isa(fccds, PropDicts.MissingProperty) || + isa(fccds[first(keys(fccds))].value, PropDicts.MissingProperty) + + haskey(diodmap, k) && @warn "No FCCD value given for detector $(detector)" + 0.0 + else + fccds[first(keys(fccds))].value + end + active_volume::Unitful.Volume{<:Float64} = if haskey(diodmap, k) get_active_volume(diodmap[k], fccd) else Float64(NaN) * u"cm^3" end + c = merge(c, (; cc4, cc4ch, daqcrate, daqcard, hvcard, hvch, enrichment, mass, total_volume, active_volume)) + end + + c end - end - function make_row(k::Symbol) - fcid::Int = get(chmap[k].daq, :fcid, -1) - rawid::Int = chmap[k].daq.rawid - channel::ChannelId = ChannelId(rawid) - - detector::DetectorId = DetectorId(k) - det_type::Symbol = Symbol(ifelse(haskey(diodmap, k), diodmap[k].type, :unknown)) - enrichment::Unitful.Quantity{<:Measurement{<:Float64}} = if haskey(diodmap, k) && haskey(diodmap[k].production, :enrichment) measurement(diodmap[k].production.enrichment.val, diodmap[k].production.enrichment.unc) else measurement(Float64(NaN), Float64(NaN)) end *100u"percent" - mass::Unitful.Mass{<:Float64} = if haskey(diodmap, k) && haskey(diodmap[k].production, :mass_in_g) diodmap[k].production.mass_in_g else Float64(NaN) end *1e-3*u"kg" - local system::Symbol = Symbol(chmap[k].system) - processable::Bool = get(dpcfg[k], :processable, false) - usability::Symbol = Symbol(get(dpcfg[k], :usability, :unknown)) - is_blinded::Bool = get(dpcfg[k], :is_blinded, false) - low_aoe_status::Symbol = Symbol(get(get(get(dpcfg[k], :psd, PropDict()), :status, PropDict()), Symbol("low_aoe"), :unknown)) - high_aoe_status::Symbol = Symbol(get(get(get(dpcfg[k], :psd, PropDict()), :status, PropDict()), Symbol("high_aoe"), :unknown)) - lq_status::Symbol = Symbol(get(get(get(dpcfg[k], :psd, PropDict()), :status, PropDict()), Symbol("lq"), :unknown)) - batch5_dt_cut::Symbol = Symbol(get(get(get(dpcfg[k], :psd, PropDict()), :status, PropDict()), Symbol("batch5_dt_cut"), :unknown)) - is_bb_like::String = replace(get(get(dpcfg[k], :psd, PropDict()), :is_bb_like, ""), "&" => "&&") - - location::Symbol, detstring::Int, position::Int, fiber::StaticString{8} = _convert_location(chmap[k].location) - - cc4::StaticString{8} = get(chmap[k].electronics.cc4, :id, "") - cc4ch::Int = get(chmap[k].electronics.cc4, :channel, -1) - daqcrate::Int = get(chmap[k].daq, :crate, -1) - daqcard::Int = chmap[k].daq.card.id - hvcard::Int = get(chmap[k].voltage.card, :id, -1) - hvch::Int = get(chmap[k].voltage, :channel, -1) - - return (; - detector, channel, fcid, rawid, system, processable, usability, is_blinded, low_aoe_status, high_aoe_status, lq_status, batch5_dt_cut, is_bb_like, det_type, - location, detstring, fiber, position, cc4, cc4ch, daqcrate, daqcard, 
hvcard, hvch, enrichment, mass - ) + StructVector(make_row.(channel_keys)) end - - chinfo = StructVector(make_row.(channel_keys)) if !(system == :all) chinfo = chinfo |> filterby(@pf $system .== system) end @@ -321,7 +347,13 @@ export channelinfo function channelinfo(data::LegendData, sel::RunCategorySelLike; kwargs...) channelinfo(data, start_filekey(data, sel); kwargs...) end +channelinfo(data::LegendData, sel...; kwargs...) = channelinfo(data, sel; kwargs...) +function channelinfo(data::LegendData, sel::Tuple{<:DataPeriodLike, <:DataRunLike, <:DataCategoryLike, Union{ChannelIdLike, DetectorIdLike}}; kwargs...) + channelinfo(data, ((sel[1:3]), sel[4]); kwargs...) +end +# TODO: Fix LRU cache and make work with channelinfo and channelname +const _cached_channelinfo_detector_idx = LRU{Tuple{UInt, AnyValiditySelection, Symbol}, Int}(maxsize = 10^3) """ channelinfo(data::LegendData, sel::AnyValiditySelection, channel::Union{ChannelIdLike, DetectorIdLike}) @@ -330,12 +362,15 @@ end Get channel information validitiy selection and [`DetectorId`](@ref) resp. [`ChannelId`](@ref). """ -function channelinfo(data::LegendData, sel::Union{AnyValiditySelection, RunCategorySelLike}, channel::Union{ChannelIdLike, DetectorIdLike}; kwargs...) +# function channelinfo(data::LegendData, sel::Union{AnyValiditySelection, RunCategorySelLike}, channel::Union{ChannelIdLike, DetectorIdLike}; kwargs...) +function channelinfo(data::LegendData, sel::Tuple{Union{AnyValiditySelection, RunCategorySelLike}, Union{ChannelIdLike, DetectorIdLike}}; kwargs...) + sel, channel = sel[1], sel[2] + key = (objectid(data), sel, Symbol(channel)) chinfo = channelinfo(data, sel; kwargs...) - if _can_convert_to(ChannelId, channel) - idxs = findall(x -> ChannelId(x) == ChannelId(channel), chinfo.channel) + idxs = if _can_convert_to(ChannelId, channel) + findall(x -> ChannelId(x) == ChannelId(channel), chinfo.channel) elseif _can_convert_to(DetectorId, channel) - idxs = findall(x -> DetectorId(x) == DetectorId(channel), chinfo.detector) + findall(x -> DetectorId(x) == DetectorId(channel), chinfo.detector) else throw(ArgumentError("Invalid channel: $channel")) end @@ -343,9 +378,8 @@ function channelinfo(data::LegendData, sel::Union{AnyValiditySelection, RunCateg throw(ArgumentError("No channel information found for $channel")) elseif length(idxs) > 1 throw(ArgumentError("Multiple channel information entries for $channel")) - else - return chinfo[only(idxs)] end + chinfo[only(idxs)] end function channel_info(data::LegendData, sel::AnyValiditySelection) diff --git a/src/ljl_expressions.jl b/src/ljl_expressions.jl index d36cde00..5f0d1764 100644 --- a/src/ljl_expressions.jl +++ b/src/ljl_expressions.jl @@ -36,14 +36,19 @@ const ljl_expr_allowed_funcs = Set([ :!, :(==), :<, :>, :>=, :<=, :!=, :isapprox, :≈, :≈, + :in, :∈, :.., :+, :-, :*, :/, :^, :sqrt, :abs, :abs2, :normalize, :norm, - :exp, exp2, :exp10, :log, :log2, :log10, + :exp, :exp2, :exp10, :log, :log2, :log10, :sin, :cos, :tan, :asin, :acos, :atan, :isnan, :isinf, :isfinite, - :all, :any, :broadcast, - :± + :all, :any, :broadcast, + :get, :getproperty, + :value, :uncertainty, :stdscore, :weightedmean, + :±, + :(:), :Symbol, :String, :Int, :Float64, :Bool, + :DetectorId, :ChannelId ]) const _ljlexpr_units = IdDict([ @@ -83,6 +88,7 @@ end _process_ljlexpr_impl(x::Real, @nospecialize(f_varsubst)) = x _process_ljlexpr_impl(x::LineNumberNode, @nospecialize(f_varsubst)) = x +_process_ljlexpr_impl(x::QuoteNode, @nospecialize(f_varsubst)) = x _process_ljlexpr_impl(sym::Symbol, f_varsubst) = 
f_varsubst(sym) function _process_ljlexpr_impl(@nospecialize(expr::Expr), @nospecialize(f_varsubst)) @@ -142,7 +148,14 @@ function _propfrom_from_expr(pf_body) end -_pf_varsym(sym::Symbol) = Expr(:$, sym) +const _ljlexpr_numbers = IdDict([ + :NaN => :NaN, + :Inf => :Inf, + :missing => :missing, + :nothing => :nothing +]) + +_pf_varsym(sym::Symbol) = get(_ljlexpr_numbers, sym, Expr(:$, sym)) const _cached_ljl_propfunc = Dict{Union{LJlExprLike}, PropertyFunction}() const _cached_ljl_propfunc_lock = ReentrantLock() diff --git a/src/lprops.jl b/src/lprops.jl index 8ed91008..9bc1ab17 100644 --- a/src/lprops.jl +++ b/src/lprops.jl @@ -4,12 +4,12 @@ function _props2lprops(pd::PropDict) if haskey(pd, :val) && length(keys(pd)) <= 3 && (haskey(pd, :unit) || haskey(pd, :err)) if haskey(pd, :unit) if haskey(pd, :err) - Unitful.Quantity.(measurement.(pd.val, pd.err), Unitful.uparse(pd.unit)) + Unitful.Quantity.(measurement.(pd.val, ifelse(isnothing(pd.err), NaN, pd.err)), Unitful.uparse(pd.unit)) else Unitful.Quantity.(pd.val, Unitful.uparse(pd.unit)) end elseif haskey(pd, :err) - measurement.(pd.val, pd.err) + measurement.(pd.val, ifelse(isnothing(pd.err), NaN, pd.err)) else throw(ArgumentError("_props2lprops can't handle PropDict $pd")) end diff --git a/src/utils/data_utils.jl b/src/utils/data_utils.jl index 65ec65ea..437f29f6 100644 --- a/src/utils/data_utils.jl +++ b/src/utils/data_utils.jl @@ -19,6 +19,10 @@ function load_run_ch end export load_run_ch function load_run_ch(open_func::Function, flatten_func::Function, data::LegendData, filekeys::Vector{FileKey}, tier::DataTierLike, ch::ChannelIdLike; check_filekeys::Bool=true, keys::Tuple=()) + Base.depwarn( + "`load_run_ch` is deprecated, use `read_ldata` instead`.", + ((Base.Core).Typeof(load_run_ch)).name.mt.name, force=true + ) ch_filekeys = if check_filekeys @info "Check Filekeys" ch_filekeys = Vector{FileKey}() @@ -93,11 +97,19 @@ function load_raw_evt end export load_raw_evt function load_raw_evt(open_func::Function, data::LegendData, ch::ChannelIdLike, data_hit::Table, sel_evt::Int) + Base.depwarn( + "`load_raw_evt` is deprecated, use `read_ldata` instead`.", + ((Base.Core).Typeof(load_raw_evt)).name.mt.name, force=true + ) data_ch_evtIDs = open_func(data.tier[:raw, data_hit.filekey[sel_evt]])[ch].raw.eventnumber[:] open_func(data.tier[:raw, data_hit.filekey[sel_evt]])[ch].raw[findall(data_hit.eventID_fadc[sel_evt] .== data_ch_evtIDs)] end function load_raw_evt(open_func::Function, data::LegendData, ch::ChannelIdLike, data_hit::Table, sel_evt::Union{UnitRange{Int}, Vector{Int}}) + Base.depwarn( + "`load_raw_evt` is deprecated, use `read_ldata` instead`.", + ((Base.Core).Typeof(load_raw_evt)).name.mt.name, force=true + ) tbl_vec = map(unique(data_hit.filekey[sel_evt])) do fk data_ch_evtIDs = open_func(data.tier[:raw, fk])[ch].raw.eventnumber[:] idxs = reduce(vcat, broadcast(data_hit.eventID_fadc[sel_evt]) do x @@ -128,6 +140,10 @@ Load data for a channel from a partition. 
- `Table`: data table with flattened events """ function load_partition_ch(open_func::Function, flatten_func::Function, data::LegendData, partinfo::Table, tier::DataTierLike, cat::DataCategoryLike, ch::ChannelIdLike; data_keys::Tuple=(), n_evts::Int=-1, select_random::Bool=false) + Base.depwarn( + "`load_partition_ch` is deprecated, use `read_ldata` instead`.", + ((Base.Core).Typeof(load_partition_ch)).name.mt.name, force=true + ) @assert !isempty(partinfo) "No partition info found" @assert n_evts > 0 || n_evts == -1 "Number of events must be positive" if isempty(data_keys) diff --git a/src/utils/pars_utils.jl b/src/utils/pars_utils.jl index 065f19fb..d3202564 100644 --- a/src/utils/pars_utils.jl +++ b/src/utils/pars_utils.jl @@ -31,13 +31,13 @@ function nt2pd(pd::PropDict, nt::Union{NamedTuple, Dict, PropDict}) end """ - writevalidity(props_db::LegendDataManagement.PropsDB, filekey::FileKey; apply_to::Symbol=:all) - writevalidity(props_db::LegendDataManagement.PropsDB, filekey::FileKey, part::DataPartitionLike; apply_to::Symbol=:all) + writevalidity(props_db::LegendDataManagement.PropsDB, filekey::FileKey; category::Symbol=:all) + writevalidity(props_db::LegendDataManagement.PropsDB, filekey::FileKey, part::DataPartitionLike; category::Symbol=:all) Write validity for a given filekey. """ function writevalidity end export writevalidity -function writevalidity(props_db::LegendDataManagement.MaybePropsDB, filekey::FileKey, apply::String; apply_to::DataCategoryLike=:all) +function writevalidity(props_db::LegendDataManagement.MaybePropsDB, filekey::FileKey, apply::Vector{String}; category::DataCategoryLike=:all) # write validity # get timestamp from filekey pars_validTimeStamp = string(filekey.time) @@ -46,33 +46,34 @@ function writevalidity(props_db::LegendDataManagement.MaybePropsDB, filekey::Fil mkpath(dirname(validity_filename)) touch(validity_filename) # check if validity already written - validity_entry = "{\"valid_from\":\"$pars_validTimeStamp\", \"category\":\"$(string(apply_to))\", \"apply\":[\"$(apply)\"]}" validity_lines = readlines(validity_filename) - is_validity = findfirst(x -> contains(x, "$pars_validTimeStamp"), validity_lines) - if isnothing(is_validity) - @info "Write validity for $pars_validTimeStamp" - open(validity_filename, "a") do io - println(io, validity_entry) - end - else - @info "Delete old $pars_validTimeStamp validity entry" - validity_lines[is_validity] = validity_entry - open(validity_filename, "w") do io - for line in sort(validity_lines) - println(io, line) - end + # check if given validity already exists + is_validity = findall(x -> contains(x, "$pars_validTimeStamp") && contains(x, "$(string(category))"), validity_lines) + if isempty(is_validity) + @info "Write new validity for $pars_validTimeStamp" + push!(validity_lines, "{\"valid_from\":\"$pars_validTimeStamp\", \"category\":\"$(string(category))\", \"apply\":[\"$(join(sort(apply), "\", \""))\"]}") + elseif length(is_validity) == 1 + @info "Merge old $pars_validTimeStamp $(string(category)) validity entry" + apply = unique(append!(Vector{String}(JSON.parse(validity_lines[first(is_validity)])["apply"]), apply)) + validity_lines[first(is_validity)] = "{\"valid_from\":\"$pars_validTimeStamp\", \"category\":\"$(string(category))\", \"apply\":[\"$(join(sort(apply), "\", \""))\"]}" + end + # write validity + open(validity_filename, "w") do io + for line in sort(validity_lines) + println(io, line) end end end -writevalidity(props_db, filekey, rsel::RunSelLike; kwargs...) 
= writevalidity(props_db, filekey, "$(first(rsel))/$(last(rsel)).json"; kwargs...) -writevalidity(props_db, filekey, part::DataPartitionLike; kwargs...) = writevalidity(props_db, filekey, "$(part).json"; kwargs...) +writevalidity(props_db, filekey, apply::String; kwargs...) = writevalidity(props_db, filekey, [apply]; kwargs...) +writevalidity(props_db, filekey, rsel::Tuple{DataPeriod, DataRun}; kwargs...) = writevalidity(props_db, filekey, "$(first(rsel))/$(last(rsel)).json"; kwargs...) +writevalidity(props_db, filekey, part::DataPartition; kwargs...) = writevalidity(props_db, filekey, "$(part).json"; kwargs...) function writevalidity(props_db::LegendDataManagement.MaybePropsDB, validity::StructVector{@NamedTuple{period::DataPeriod, run::DataRun, filekey::FileKey, validity::String}}; kwargs...) # get unique runs and periods for the individual entries runsel = unique([(row.period, row.run) for row in validity]) # write validity for each run and period for (period, run) in runsel val = filter(row -> row.period == period && row.run == run, validity) - writevalidity(props_db, first(val.filekey), join(val.validity, "\", \""); kwargs...) + writevalidity(props_db, first(val.filekey), val.validity; kwargs...) end end diff --git a/src/utils/utils.jl b/src/utils/utils.jl index 1c07e82b..12d7024f 100644 --- a/src/utils/utils.jl +++ b/src/utils/utils.jl @@ -10,6 +10,7 @@ using PropertyDicts using StructArrays using Unitful using Format +using JSON using Dates using Measurements using Measurements: ± diff --git a/test/test_ext_ssd.jl b/test/test_ext_ssd.jl index 9ac5b638..a4affa05 100644 --- a/test/test_ext_ssd.jl +++ b/test/test_ext_ssd.jl @@ -3,11 +3,36 @@ using LegendDataManagement using Test -using SolidStateDetectors:SolidStateDetector +using SolidStateDetectors +using SolidStateDetectors: ConstantImpurityDensity +using Unitful +using LegendHDF5IO include("testing_utils.jl") @testset "test_ext_ssd" begin l200 = LegendData(:l200) - @test SolidStateDetector(l200, :V99000A) isa SolidStateDetector + for detname in (:V99000A, :B99000A, :C99000A, :P99000A) + @testset "$(detname)" begin + det = SolidStateDetector{Float64}(l200, detname) + @test det isa SolidStateDetector + sim = Simulation{Float64}(l200, detname) + @test sim isa Simulation + + # Compare active volume from SSD to active volume from LegendDataManagement + SolidStateDetectors.apply_initial_state!(sim, ElectricPotential, Grid(sim, max_tick_distance = 0.1u"mm")) + active_volume_ssd = SolidStateDetectors.get_active_volume(sim.point_types) + active_volume_ldm = LegendDataManagement.get_active_volume(l200.metadata.hardware.detectors.germanium.diodes[Symbol(detname)], 0.0) + @test isapprox(active_volume_ssd, active_volume_ldm, rtol = 0.01) + + # The creation via config files allows to save Simulations to files using LegendHDF5IO + lh5name = "$(detname).lh5" + isfile(lh5name) && rm(lh5name) + @test_nowarn ssd_write(lh5name, sim) + @test isfile(lh5name) + @test sim == ssd_read(lh5name, Simulation) + @test_nowarn rm(lh5name) + @test !isfile(lh5name) + end + end end diff --git a/test/test_legend_data.jl b/test/test_legend_data.jl index 828b0722..907064f3 100644 --- a/test/test_legend_data.jl +++ b/test/test_legend_data.jl @@ -24,11 +24,23 @@ include("testing_utils.jl") @test l200.metadata isa LegendDataManagement.PropsDB # ToDo: Make type-stable: - @test (channelinfo(l200, filekey)) isa TypedTables.Table + @test channelinfo(l200, filekey) isa TypedTables.Table chinfo = channelinfo(l200, filekey) @test all(filterby(@pf $processable && $usability == 
:on)(chinfo).processable) @test all(filterby(@pf $processable && $usability == :on)(chinfo).usability .== :on) + # Delete the channelinfo cache + empty!(LegendDataManagement._cached_channelinfo) + + # Test the extended channel info with active volume calculation + extended = channelinfo(l200, filekey, extended = true) + @test extended isa TypedTables.Table + + # Check that some columns only appear in the extended channelinfo + extended_columns = (:cc4, :cc4ch, :daqcrate, :daqcard, :hvcard, :hvch, :enrichment, :mass, :total_volume, :active_volume) + @test !any(in(columnnames(chinfo)), extended_columns) + @test all(in(columnnames(extended)), extended_columns) + + # ToDo: Make type-stable: + # @test #=@inferred=#(channel_info(l200, filekey)) isa StructArray + # chinfo = channel_info(l200, filekey)
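For context, a sketch of how the extended per-channel information tested above can be accessed (detector name and values are illustrative):

```julia
row = channelinfo(l200, filekey, :V99000A; extended = true)
row.enrichment      # Unitful percentage carrying a Measurement uncertainty, e.g. (92.0 ± 0.5)%
row.active_volume   # Unitful volume; NaN * u"cm^3" if the detector is missing from the diode metadata
```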