diff --git a/doc/Galacticus.bib b/doc/Galacticus.bib
index 797db6b030..68057317f5 100644
--- a/doc/Galacticus.bib
+++ b/doc/Galacticus.bib
@@ -4441,6 +4441,17 @@ @article{kummer_effective_2018
pages = {388--399}
}
+@misc{kiwiakos_answer_2014,
+ title = {Answer to "{Brownian} {Bridge}'s first passage time distribution"},
+ url = {https://quant.stackexchange.com/a/14219},
+ urldate = {2023-09-28},
+ journal = {Quantitative Finance Stack Exchange},
+ author = {Kiwiakos},
+ month = jul,
+ year = {2014},
+ file = {Snapshot:/home/abensonca/.mozilla/firefox/f54gqgdx.default/zotero/storage/RQ6Q725B/14219.html:text/html},
+}
+
@article{lacey_merger_1993,
title = {Merger rates in hierarchical models of galaxy formation},
volume = {262},
diff --git a/source/merger_trees.branching_probability.generalized_Press_Schechter.F90 b/source/merger_trees.branching_probability.generalized_Press_Schechter.F90
index 6bcf700aab..46204815a6 100644
--- a/source/merger_trees.branching_probability.generalized_Press_Schechter.F90
+++ b/source/merger_trees.branching_probability.generalized_Press_Schechter.F90
@@ -71,10 +71,10 @@
type (rootFinder ) :: finder
type (integrator ) :: integrator_ , integratorSubresolution_
! Parent halo shared variables.
- double precision :: parentDTimeDDeltaCritical , parentDelta , &
- & parentHaloMass , parentSigma , &
- & parentSigmaSquared , parentTime , &
- & probabilityMinimumMass , probabilitySeek , &
+ double precision :: parentDTimeDDeltaCritical , parentDelta , &
+ & parentHaloMass , parentSigma , &
+ & parentSigmaSquared , parentTime , &
+ & probabilityMinimumMass , probabilitySeek , &
& normalization
! Record of mass resolution.
@@ -92,7 +92,7 @@
! Record of issued warnings.
logical :: subresolutionFractionIntegrandFailureWarned
! Option controlling whether only lower-half of the distribution function should be used.
- logical :: distributionFunctionLowerHalfOnly
+ logical :: distributionFunctionLowerHalfOnly , distributionFunctionNormalize
! Minimum mass to which subresolution fractions will be integrated.
double precision :: massMinimum
! Current epoch.
@@ -150,7 +150,8 @@ function generalizedPressSchechterConstructorParameters(parameters) result(self)
class (excursionSetFirstCrossingClass ), pointer :: excursionSetFirstCrossing_
class (mergerTreeBranchingProbabilityModifierClass ), pointer :: mergerTreeBranchingProbabilityModifier_
double precision :: deltaStepMaximum , massMinimum
- logical :: smoothAccretion , distributionFunctionLowerHalfOnly
+ logical :: smoothAccretion , distributionFunctionLowerHalfOnly, &
+ & distributionFunctionNormalize
!![
@@ -177,13 +178,24 @@ function generalizedPressSchechterConstructorParameters(parameters) result(self)
If true, only the lower half ($M < M_0/2$) of the branching rate distribution function is used, as per the algorithm of \cite{cole_hierarchical_2000}.
+
+ distributionFunctionNormalize
+ .true.
+
+ If using the full range ($M < M_0$) of the branching rate distribution, if this parameter is {\normalfont \ttfamily
+ true} then divide the branching rate by 2. This is appropriate if two progenitors are to be sampled (i.e. a binary
+ split). If the branching rate applies to only a single branch it is more appropriate to set this parameter to
+ {\normalfont \ttfamily false}, in which case this normalization by a factor of 2 is \emph{not} applied.
+
+
+
!!]
- self=mergerTreeBranchingProbabilityGnrlzdPrssSchchtr(deltaStepMaximum,massMinimum,smoothAccretion,distributionFunctionLowerHalfOnly,cosmologyFunctions_,criticalOverdensity_,cosmologicalMassVariance_,excursionSetFirstCrossing_,mergerTreeBranchingProbabilityModifier_)
+ self=mergerTreeBranchingProbabilityGnrlzdPrssSchchtr(deltaStepMaximum,massMinimum,smoothAccretion,distributionFunctionLowerHalfOnly,distributionFunctionNormalize,cosmologyFunctions_,criticalOverdensity_,cosmologicalMassVariance_,excursionSetFirstCrossing_,mergerTreeBranchingProbabilityModifier_)
!![
@@ -195,7 +207,7 @@ If true, only the lower half ($M < M_0/2$) of the branching rate
return
end function generalizedPressSchechterConstructorParameters
- function generalizedPressSchechterConstructorInternal(deltaStepMaximum,massMinimum,smoothAccretion,distributionFunctionLowerHalfOnly,cosmologyFunctions_,criticalOverdensity_,cosmologicalMassVariance_,excursionSetFirstCrossing_,mergerTreeBranchingProbabilityModifier_) result(self)
+ function generalizedPressSchechterConstructorInternal(deltaStepMaximum,massMinimum,smoothAccretion,distributionFunctionLowerHalfOnly,distributionFunctionNormalize,cosmologyFunctions_,criticalOverdensity_,cosmologicalMassVariance_,excursionSetFirstCrossing_,mergerTreeBranchingProbabilityModifier_) result(self)
!!{
Internal constructor for the \cite{cole_hierarchical_2000} merger tree building class.
!!}
@@ -208,10 +220,11 @@ function generalizedPressSchechterConstructorInternal(deltaStepMaximum,massMinim
class (excursionSetFirstCrossingClass ), intent(in ), target :: excursionSetFirstCrossing_
class (mergerTreeBranchingProbabilityModifierClass ), intent(in ), target :: mergerTreeBranchingProbabilityModifier_
double precision , intent(in ) :: deltaStepMaximum , massMinimum
- logical , intent(in ) :: smoothAccretion , distributionFunctionLowerHalfOnly
+ logical , intent(in ) :: smoothAccretion , distributionFunctionLowerHalfOnly , &
+ & distributionFunctionNormalize
double precision , parameter :: toleranceAbsolute =0.0d+0, toleranceRelative =1.0d-9
!![
-
+
!!]
self%excursionSetsTested =.false.
@@ -307,12 +320,16 @@ double precision function generalizedPressSchechterMassBranch(self,haloMass,delt
call self%computeCommonFactors(node,haloMass,deltaCritical,time)
! Determine the upper mass limit to use.
if (self%distributionFunctionLowerHalfOnly) then
- massUpper =+0.5d0*haloMass
- self%normalization=+1.0d0
+ massUpper =+0.5d0*haloMass
+ self%normalization =+1.0d0
else
- massUpper =+ haloMass
- self%normalization=+0.5d0
- end if
+ massUpper =+ haloMass
+ if (self%distributionFunctionNormalize) then
+ self%normalization=+0.5d0
+ else
+ self%normalization=+1.0d0
+ end if
+ end if
! Check that the root is bracketed.
rootFunctionLower=generalizedPressSchechterMassBranchRoot(massResolution)
rootFunctionUpper=generalizedPressSchechterMassBranchRoot(massUpper )
@@ -451,11 +468,15 @@ double precision function generalizedPressSchechterProbability(self,haloMass,del
call self%computeCommonFactors(node,haloMass,deltaCritical,time)
massMinimum=massResolution
if (self%distributionFunctionLowerHalfOnly) then
- massMaximum =+0.5d0*self%parentHaloMass
- normalization=+1.0d0
+ massMaximum =+0.5d0*self%parentHaloMass
+ normalization =+1.0d0
else
- massMaximum =+ self%parentHaloMass
- normalization=+0.5d0
+ massMaximum =+ self%parentHaloMass
+ if (self%distributionFunctionNormalize) then
+ normalization=+0.5d0
+ else
+ normalization=+1.0d0
+ end if
end if
self_ => self
generalizedPressSchechterProbability = +normalization &
diff --git a/source/merger_trees.build.controller.constrained.F90 b/source/merger_trees.build.controller.constrained.F90
index 33ff319ca2..96ae09321f 100644
--- a/source/merger_trees.build.controller.constrained.F90
+++ b/source/merger_trees.build.controller.constrained.F90
@@ -110,7 +110,7 @@ function constrainedConstructorParameters(parameters) result(self)
labelvar_str(' ')
- A label to apply to constrained node.
+ A label to apply to the constrained node.
!!]
if (label == '') label=' '
diff --git a/source/merger_trees.build.controller.filtered.F90 b/source/merger_trees.build.controller.filtered.F90
new file mode 100644
index 0000000000..b29854453d
--- /dev/null
+++ b/source/merger_trees.build.controller.filtered.F90
@@ -0,0 +1,157 @@
+!! Copyright 2009, 2010, 2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018,
+!! 2019, 2020, 2021, 2022, 2023
+!! Andrew Benson
+!!
+!! This file is part of Galacticus.
+!!
+!! Galacticus is free software: you can redistribute it and/or modify
+!! it under the terms of the GNU General Public License as published by
+!! the Free Software Foundation, either version 3 of the License, or
+!! (at your option) any later version.
+!!
+!! Galacticus is distributed in the hope that it will be useful,
+!! but WITHOUT ANY WARRANTY; without even the implied warranty of
+!! MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+!! GNU General Public License for more details.
+!!
+!! You should have received a copy of the GNU General Public License
+!! along with Galacticus. If not, see .
+
+!!{
+Contains a module which implements a merger tree build controller class which follows branches only if they pass a filter.
+!!}
+
+ use :: Galactic_Filters, only : galacticFilterClass
+
+ !![
+
+ A merger tree build controller class which builds filtered trees.
+
+ !!]
+ type, extends(mergerTreeBuildControllerClass) :: mergerTreeBuildControllerFiltered
+ !!{
+ A merger tree build controller class which builds filtered trees. Specifically, only nodes which pass a provided filter are
+ followed.
+ !!}
+ private
+ class(mergerTreeBranchingProbabilityClass), pointer :: mergerTreeBranchingProbability_ => null()
+ class(galacticFilterClass ), pointer :: galacticFilter_ => null()
+ contains
+ final :: filteredDestructor
+ procedure :: control => filteredControl
+ procedure :: branchingProbabilityObject => filteredBranchingProbabilityObject
+ procedure :: nodesInserted => filteredNodesInserted
+ end type mergerTreeBuildControllerFiltered
+
+ interface mergerTreeBuildControllerFiltered
+ !!{
+ Constructors for the ``filtered'' merger tree build controller class.
+ !!}
+ module procedure filteredConstructorParameters
+ module procedure filteredConstructorInternal
+ end interface mergerTreeBuildControllerFiltered
+
+contains
+
+ function filteredConstructorParameters(parameters) result(self)
+ !!{
+ Constructor for the ``filtered'' merger tree build controller class which takes a parameter set as input.
+ !!}
+ use :: Input_Parameters, only : inputParameter, inputParameters
+ implicit none
+ type (mergerTreeBuildControllerFiltered ) :: self
+ type (inputParameters ), intent(inout) :: parameters
+ class(mergerTreeBranchingProbabilityClass), pointer :: mergerTreeBranchingProbability_
+ class(galacticFilterClass ), pointer :: galacticFilter_
+
+ !![
+
+
+ !!]
+ self=mergerTreeBuildControllerFiltered(mergerTreeBranchingProbability_,galacticFilter_)
+ !![
+
+
+ !!]
+ return
+ end function filteredConstructorParameters
+
+ function filteredConstructorInternal(mergerTreeBranchingProbability_,galacticFilter_) result(self)
+ !!{
+ Internal constructor for the ``filtered'' merger tree build controller class.
+ !!}
+ implicit none
+ type (mergerTreeBuildControllerFiltered ) :: self
+ class(mergerTreeBranchingProbabilityClass), intent(in ), target :: mergerTreeBranchingProbability_
+ class(galacticFilterClass ), intent(in ), target :: galacticFilter_
+ !![
+
+ !!]
+
+ return
+ end function filteredConstructorInternal
+
+ subroutine filteredDestructor(self)
+ !!{
+ Destructor for the {\normalfont \ttfamily filtered} merger tree build controller class.
+ !!}
+ implicit none
+ type(mergerTreeBuildControllerFiltered), intent(inout) :: self
+
+ !![
+
+
+ !!]
+ return
+ end subroutine filteredDestructor
+
+ logical function filteredControl(self,node,treeWalker_)
+ !!{
+ Control tree building, following nodes only while they pass the filter.
+ !!}
+ implicit none
+ class(mergerTreeBuildControllerFiltered), intent(inout) :: self
+ type (treeNode ), intent(inout), pointer :: node
+ class(mergerTreeWalkerClass ), intent(inout), optional :: treeWalker_
+
+
+ filteredControl=.true.
+ ! Move to the next node in the tree while such exists, and the current node does not pass the filter.
+ do while (filteredControl.and..not.self%galacticFilter_%passes(node))
+ if (present(treeWalker_)) then
+ filteredControl=treeWalker_%next(node)
+ else
+ filteredControl=.false.
+ end if
+ end do
+ return
+ end function filteredControl
+
+ function filteredBranchingProbabilityObject(self,node) result(mergerTreeBranchingProbability_)
+ !!{
+ Return a pointer to the merger tree branching probability object to use.
+ !!}
+ implicit none
+ class(mergerTreeBranchingProbabilityClass), pointer :: mergerTreeBranchingProbability_
+ class(mergerTreeBuildControllerFiltered ), intent(inout) :: self
+ type (treeNode ), intent(inout) :: node
+ !$GLC attributes unused :: node
+
+ mergerTreeBranchingProbability_ => self%mergerTreeBranchingProbability_
+ return
+ end function filteredBranchingProbabilityObject
+
+ subroutine filteredNodesInserted(self,nodeCurrent,nodeProgenitor1,nodeProgenitor2,didBranch)
+ !!{
+ Act on the insertion of nodes into the merger tree.
+ !!}
+ implicit none
+ class (mergerTreeBuildControllerFiltered), intent(inout) :: self
+ type (treeNode ), intent(inout) :: nodeCurrent , nodeProgenitor1
+ type (treeNode ), intent(inout), optional :: nodeProgenitor2
+ logical , intent(in ), optional :: didBranch
+ !$GLC attributes unused :: self, nodeCurrent, nodeProgenitor1, nodeProgenitor2, didBranch
+
+ ! Nothing to do.
+ return
+ end subroutine filteredNodesInserted
diff --git a/source/merger_trees.build.controller.single_step.F90 b/source/merger_trees.build.controller.single_step.F90
index c3a07d4de8..7d24b0ebe1 100644
--- a/source/merger_trees.build.controller.single_step.F90
+++ b/source/merger_trees.build.controller.single_step.F90
@@ -42,7 +42,7 @@
double precision :: redshiftStep , criticalOverdensityStep
logical :: haltAfterStep
integer :: primaryLabelID , secondaryLabelID , &
- & smoothLabelID
+ & smoothLabelID , singleLabelID
contains
final :: singleStepDestructor
procedure :: control => singleStepControl
@@ -134,9 +134,10 @@ function singleStepConstructorInternal(criticalOverdensityStep,haltAfterStep,cos
& ) &
& ) &
& )
- self% primaryLabelID=nodeLabelRegister('progenitorFirst' ,'Identifies progenitors that were sampled first from the progenitor mass distribution.' )
- self%secondaryLabelID=nodeLabelRegister('progenitorSecond','Identifies progenitors that were sampled second from the progenitor mass distribution.')
- self% smoothLabelID=nodeLabelRegister('progenitorSmooth','Identifies progenitors resulting from smooth/sub-resolution accretion.' )
+ self% primaryLabelID=nodeLabelRegister('progenitorFirst' ,'Identifies progenitors that were sampled first from the progenitor mass distribution.' )
+ self%secondaryLabelID=nodeLabelRegister('progenitorSecond','Identifies progenitors that were sampled second from the progenitor mass distribution.' )
+ self% smoothLabelID=nodeLabelRegister('progenitorSmooth','Identifies progenitors resulting from smooth/sub-resolution accretion.' )
+ self% singleLabelID=nodeLabelRegister('progenitorSingle','Identifies progenitors on a single branch all of which were sampled first from the progenitor mass distribution.')
return
end function singleStepConstructorInternal
@@ -160,15 +161,19 @@ logical function singleStepControl(self,node,treeWalker_)
!!{
Apply control to merger tree building.
!!}
+ use :: Nodes_Labels, only : nodeLabelSet
implicit none
class(mergerTreeBuildControllerSingleStep), intent(inout) :: self
type (treeNode ), intent(inout), pointer :: node
class(mergerTreeWalkerClass ), intent(inout), optional :: treeWalker_
+ ! Mark the root halo as being on the single branch.
+ if (associated(node) .and. .not.associated(node%parent)) &
+ & call nodeLabelSet(self%singleLabelID,node)
! First call the decorated controller.
singleStepControl=self%mergerTreeBuildController_%control(node,treeWalker_)
! If we are to halt after the first step, then override the decorated controller as necessary.
- if (self%haltAfterStep .and. associated(node%parent)) singleStepControl=.false.
+ if (self%haltAfterStep .and. associated(node) .and. associated(node%parent)) singleStepControl=.false.
return
end function singleStepControl
@@ -213,7 +218,7 @@ logical function singleStepControlTimeMaximum(self,node,massBranch,criticalOverd
use :: Error , only : Error_Report
use :: Galacticus_Nodes, only : nodeComponentBasic
use :: Kind_Numbers , only : kind_int8
- use :: Nodes_Labels , only : nodeLabelSet
+ use :: Nodes_Labels , only : nodeLabelSet , nodeLabelIsPresent
implicit none
class (mergerTreeBuildControllerSingleStep), intent(inout) :: self
type (treeNode ), intent(inout), target :: node
@@ -241,7 +246,9 @@ logical function singleStepControlTimeMaximum(self,node,massBranch,criticalOverd
nodeNew %sibling => null()
call basic%massSet(massBranch )
call basic%timeSet(criticalOverdensityBranch)
- call nodeLabelSet(self%smoothLabelID,nodeNew)
+ call nodeLabelSet(self%smoothLabelID,nodeNew)
+ if (nodeLabelIsPresent(self%singleLabelID,node)) &
+ & call nodeLabelSet(self%singleLabelID,nodeNew)
! Return false indicating that the current node is finished, so building should continue from its progenitor nodes.
singleStepControlTimeMaximum=.false.
return
@@ -264,7 +271,7 @@ subroutine singleStepNodesInserted(self,nodeCurrent,nodeProgenitor1,nodeProgenit
!!{
Act on the insertion of nodes into the merger tree.
!!}
- use :: Nodes_Labels, only : nodeLabelSet
+ use :: Nodes_Labels, only : nodeLabelSet, nodeLabelIsPresent
implicit none
class (mergerTreeBuildControllerSingleStep), intent(inout) :: self
type (treeNode ), intent(inout) :: nodeCurrent , nodeProgenitor1
@@ -282,5 +289,7 @@ subroutine singleStepNodesInserted(self,nodeCurrent,nodeProgenitor1,nodeProgenit
else
call nodeLabelSet(self% smoothLabelID,nodeProgenitor1)
end if
+ if (nodeLabelIsPresent(self%singleLabelID,nodeCurrent)) &
+ & call nodeLabelSet(self%singleLabelID,nodeProgenitor1)
return
end subroutine singleStepNodesInserted
diff --git a/source/structure_formation.excursion_sets.first_crossing_distribution.Farahi.F90 b/source/structure_formation.excursion_sets.first_crossing_distribution.Farahi.F90
index 6267ce1ac2..8e4c5f0519 100644
--- a/source/structure_formation.excursion_sets.first_crossing_distribution.Farahi.F90
+++ b/source/structure_formation.excursion_sets.first_crossing_distribution.Farahi.F90
@@ -1589,7 +1589,7 @@ double precision function farahiVarianceLimit(self,varianceProgenitor)
return
end function farahiVarianceLimit
- function farahiVarianceResidual(self,time,varianceCurrent,varianceIntermediate,varianceProgenitor,cosmologicalMassVariance_) result(varianceResidual)
+ function farahiVarianceResidual(self,time,varianceCurrent,varianceProgenitor,varianceIntermediate,cosmologicalMassVariance_) result(varianceResidual)
!!{
Return the residual variance between two points for a standard Weiner process.
!!}
@@ -1633,12 +1633,12 @@ function farahiVarianceResidual(self,time,varianceCurrent,varianceIntermediate,v
! "Current" - refers to the current halo being considered for branching, i.e. the halo existing at point (S₁,δ₁);
! "Progenitor" - refers to the potential progenitor halo being considered, i.e. the halo corresponding to some variance S > S₁;
! "Intermediate" - refers to the intermediate variance, S̃ (with S₁ < S̃ < S).
- varianceResidual=+varianceIntermediate &
- & -varianceProgenitor
+ varianceResidual=+varianceProgenitor &
+ & -varianceIntermediate
return
end function farahiVarianceResidual
- function farahiOffsetEffective(self,time,varianceCurrent,varianceIntermediate,varianceProgenitor,deltaCurrent,deltaIntermediate,deltaProgenitor,cosmologicalMassVariance_) result(offsetEffective)
+ function farahiOffsetEffective(self,time,varianceCurrent,varianceProgenitor,varianceIntermediate,deltaCurrent,deltaProgenitor,deltaIntermediate,cosmologicalMassVariance_) result(offsetEffective)
!!{
Return the residual variance between two points for a standard Weiner process.
!!}
@@ -1680,7 +1680,7 @@ function farahiOffsetEffective(self,time,varianceCurrent,varianceIntermediate,va
! "Current" - refers to the current halo being considered for branching, i.e. the halo existing at point (S₁,δ₁);
! "Progenitor" - refers to the potential progenitor halo being considered, i.e. the halo corresponding to some variance S > S₁;
! "Intermediate" - refers to the intermediate variance, S̃ (with S₁ < S̃ < S).
- offsetEffective=+deltaIntermediate &
- & -deltaProgenitor
+ offsetEffective=+deltaProgenitor &
+ & -deltaIntermediate
return
end function farahiOffsetEffective
diff --git a/source/structure_formation.excursion_sets.first_crossing_distribution.Farahi.midpoint.Brownian_bridge.F90 b/source/structure_formation.excursion_sets.first_crossing_distribution.Farahi.midpoint.Brownian_bridge.F90
index 6db4864a67..79e06751a1 100644
--- a/source/structure_formation.excursion_sets.first_crossing_distribution.Farahi.midpoint.Brownian_bridge.F90
+++ b/source/structure_formation.excursion_sets.first_crossing_distribution.Farahi.midpoint.Brownian_bridge.F90
@@ -76,6 +76,19 @@
\end{equation}
In these cases we still use the residual variance since $\Delta S \rightarrow \mathrm{Var}(S)$ as $\tilde{S} \rightarrow S_1$.
+ When computing the distribution, $p(\delta,s)$, of trajectories at variance $S$, given that they first crossed the barrier,
+ $B(\tilde{S})$, at some smaller variance, $\tilde{S}$, (equation A2 of \citealt{benson_dark_2012}) we must condition
+ \emph{both} the residual variance and drift term on the intermediate point, $\tilde{S},B(\tilde{S})$. Fortunately, given any
+ Brownian random walk (including Brownian bridges) for which we know two points, the distribution of trajectories between
+ those points is simply another Brownian bridge. Therefore, we can write:
+ \begin{equation}
+ \Delta \delta = \delta - \tilde{\delta} - \frac{S-\tilde{S}}{S_2-\tilde{S}}(\delta_2-\tilde{\delta}),
+ \end{equation}
+ and:
+ \begin{equation}
+ \Delta S = \frac{(S_2-S)(S-\tilde{S})}{S_2-\tilde{S}}.
+ \end{equation}
+
This class provides functions implementing these modified effective offset and residual variance.
@@ -271,7 +284,7 @@ double precision function farahiMidpointBrownianBridgeRate(self,variance,varianc
varianceConstrained =+rootVarianceConstrained**2
criticalOverdensityConstrained=+self%criticalOverdensityConstrained &
& *sqrt( &
- & +varianceConstrained &
+ & + varianceConstrained &
& /self%varianceConstrained &
& )
! Determine whether to use the conditioned or unconditioned solutions.
@@ -329,14 +342,17 @@ double precision function farahiMidpointBrownianBridgeVarianceLimit(self,varianc
class (excursionSetFirstCrossingFarahiMidpointBrownianBridge), intent(inout) :: self
double precision , intent(in ) :: varianceProgenitor
- farahiMidpointBrownianBridgeVarianceLimit=min( &
- & self%excursionSetFirstCrossingFarahiMidpoint%varianceLimit (varianceProgenitor), &
- & self %varianceConstrained &
+ farahiMidpointBrownianBridgeVarianceLimit=min( &
+ & max( &
+ & self%varianceMaximumRate, &
+ & 2.0d0* varianceProgenitor &
+ & ) , &
+ & self%varianceConstrained &
& )
return
end function farahiMidpointBrownianBridgeVarianceLimit
- function farahiMidpointBrownianBridgeVarianceResidual(self,time,varianceCurrent,varianceIntermediate,varianceProgenitor,cosmologicalMassVariance_) result(varianceResidual)
+ function farahiMidpointBrownianBridgeVarianceResidual(self,time,varianceCurrent,varianceProgenitor,varianceIntermediate,cosmologicalMassVariance_) result(varianceResidual)
!!{
Return the residual variance between two points for a Brownian bridge.
!!}
@@ -365,8 +381,8 @@ function farahiMidpointBrownianBridgeVarianceResidual(self,time,varianceCurrent,
!
! S₂ = varianceConstrained
! S₁ = varianceCurrent
- ! S̃ = varianceProgenitor +varianceCurrent
- ! S = varianceIntermediate+varianceCurrent
+ ! S̃ = varianceIntermediate+varianceCurrent
+ ! S = varianceProgenitor +varianceCurrent
!
! Note that the variables "varianceIntermediate" and "varianceProgenitor" are defined to be the variances in excess of S₁ - which is why they
! appear with "varianceCurrent" added to them in the above.
@@ -386,13 +402,13 @@ function farahiMidpointBrownianBridgeVarianceResidual(self,time,varianceCurrent,
rootVarianceConstrained=+cosmologicalMassVariance_%rootVariance(self%massConstrained,time)
varianceConstrained =+rootVarianceConstrained**2
! Compute the residual variance.
- varianceResidual =+(varianceConstrained -varianceIntermediate-varianceCurrent) &
- & *(varianceIntermediate-varianceProgenitor ) &
- & /(varianceConstrained -varianceCurrent)
+ varianceResidual =+(varianceConstrained-varianceProgenitor -varianceCurrent) &
+ & *(varianceProgenitor -varianceIntermediate ) &
+ & /(varianceConstrained-varianceIntermediate-varianceCurrent)
return
end function farahiMidpointBrownianBridgeVarianceResidual
- function farahiMidpointBrownianBridgeOffsetEffective(self,time,varianceCurrent,varianceIntermediate,varianceProgenitor,deltaCurrent,deltaIntermediate,deltaProgenitor,cosmologicalMassVariance_) result(offsetEffective)
+ function farahiMidpointBrownianBridgeOffsetEffective(self,time,varianceCurrent,varianceProgenitor,varianceIntermediate,deltaCurrent,deltaProgenitor,deltaIntermediate,cosmologicalMassVariance_) result(offsetEffective)
!!{
Return the residual variance between two points for a Brownian bridge.
!!}
@@ -423,12 +439,12 @@ function farahiMidpointBrownianBridgeOffsetEffective(self,time,varianceCurrent,v
!
! S₂ = varianceConstrained
! S₁ = varianceCurrent
- ! S̃ = varianceProgenitor +varianceCurrent
- ! S = varianceIntermediate+varianceCurrent
+ ! S̃ = varianceIntermediate+varianceCurrent
+ ! S = varianceProgenitor +varianceCurrent
! δ₂ = criticalOverdensityConstrained
! δ₁ = deltaCurrent
- ! δ̃ = deltaProgenitor +deltaCurrent
- ! δ = deltaIntermediate +deltaCurrent
+ ! δ̃ = deltaIntermediate +deltaCurrent
+ ! δ = deltaProgenitor +deltaCurrent
!
! Note that the variables "varianceIntermediate" and "varianceProgenitor" are defined to be the variances in excess of S₁ - which is why they
! appear with "varianceCurrent" added to them in the above. Similarly, "deltaIntermediate" and "deltaProgenitor" are defined relative to "deltaCurrent".
@@ -445,17 +461,22 @@ function farahiMidpointBrownianBridgeOffsetEffective(self,time,varianceCurrent,v
! variances used throughout the excursion set solver code grow in time according to linear perturbation theory. The fixed
! quantity in our Brownian bridge is the mass and time of the halo at the end of the bridge. Therefore, we must compute the
! variance corresponding to this mass at the requested epoch.
+ !
+ ! Also note that the drift term is always computed from the difference between the constraint point and the intermediate
+ ! point. This is because, for a trajectory having arrived at the intermediate point, the set of all possible trajectories that
+ ! take it from there to the constrained point is precisely a Brownian bridge starting from the intermediate point and ending
+ ! at the constraint point.
rootVarianceConstrained =+cosmologicalMassVariance_%rootVariance(self%massConstrained,time)
varianceConstrained =+rootVarianceConstrained**2
- criticalOverdensityConstrained=+self%criticalOverdensityConstrained &
- & *sqrt( &
- & +varianceConstrained &
- & /self%varianceConstrained &
+ criticalOverdensityConstrained=+self%criticalOverdensityConstrained &
+ & *sqrt( &
+ & + varianceConstrained &
+ & /self%varianceConstrained &
& )
- offsetEffective =+(+deltaIntermediate -deltaProgenitor ) &
- & -(+criticalOverdensityConstrained-deltaCurrent ) &
- & *(+varianceIntermediate -varianceProgenitor) &
- & /(+varianceConstrained -varianceCurrent )
+ offsetEffective =+(+deltaProgenitor -deltaIntermediate ) &
+ & -(+criticalOverdensityConstrained-deltaIntermediate -deltaCurrent ) &
+ & *(+varianceProgenitor -varianceIntermediate ) &
+ & /(+varianceConstrained -varianceIntermediate-varianceCurrent)
return
end function farahiMidpointBrownianBridgeOffsetEffective
diff --git a/source/structure_formation.excursion_sets.first_crossing_distribution.Farahi.midpoint.F90 b/source/structure_formation.excursion_sets.first_crossing_distribution.Farahi.midpoint.F90
index ceec92c49a..ad18ba6eef 100644
--- a/source/structure_formation.excursion_sets.first_crossing_distribution.Farahi.midpoint.F90
+++ b/source/structure_formation.excursion_sets.first_crossing_distribution.Farahi.midpoint.F90
@@ -790,9 +790,9 @@ subroutine farahiMidpointRateTabulate(self,varianceProgenitor,time,node)
if (varianceProgenitorRateQuad(1)+varianceCurrentRateQuad(iVariance) >= varianceMaximumRateLimit) then
firstCrossingRateQuad(1)= 0.0_kind_quad
else
- offsetEffective =self%offsetEffective (self%timeRate(iTime),varianceCurrentRateQuad(iVariance),varianceProgenitorRateQuad(1),varianceMidpointRateQuad(1),0.0_kind_quad,barrierRateQuad(1),barrierMidpointRateQuad(1),cosmologicalMassVariance_)
- varianceResidual =self%varianceResidual(self%timeRate(iTime),varianceCurrentRateQuad(iVariance),varianceProgenitorRateQuad(1),varianceMidpointRateQuad(1) ,cosmologicalMassVariance_)
- integralKernelRate_=integralKernelRate(varianceResidual,offsetEffective)
+ offsetEffective =+self%offsetEffective (self%timeRate(iTime),varianceCurrentRateQuad(iVariance),varianceProgenitorRateQuad(1),varianceMidpointRateQuad(1),0.0_kind_quad,barrierRateQuad(1),barrierMidpointRateQuad(1),cosmologicalMassVariance_)
+ varianceResidual =+self%varianceResidual(self%timeRate(iTime),varianceCurrentRateQuad(iVariance),varianceProgenitorRateQuad(1),varianceMidpointRateQuad(1) ,cosmologicalMassVariance_)
+ integralKernelRate_ =+integralKernelRate(varianceResidual,offsetEffective)
! If the integral kernel is zero (to machine precision) then simply assume no crossing rate.
if (integralKernelRate_ <= 0.0d0) then
firstCrossingRateQuad=0.0d0
@@ -801,11 +801,8 @@ subroutine farahiMidpointRateTabulate(self,varianceProgenitor,time,node)
! Compute the first crossing rate at the first grid point.
offsetEffective =+self%offsetEffective (self%timeRate(iTime),varianceCurrentRateQuad(iVariance),varianceProgenitorRateQuad(1),0.0_kind_quad,0.0_kind_quad,barrierRateQuad(1),barrier,cosmologicalMassVariance_)
varianceResidual =+self%varianceResidual(self%timeRate(iTime),varianceCurrentRateQuad(iVariance),varianceProgenitorRateQuad(1),0.0_kind_quad ,cosmologicalMassVariance_)
- firstCrossingRateQuad(1)=+Error_Function_Complementary( &
- & +offsetEffective &
- & /sqrt(2.0_kind_quad*varianceResidual) &
- & ) &
- & /varianceStepRate &
+ firstCrossingRateQuad(1)=+integralKernelRate(varianceResidual,offsetEffective) &
+ & /varianceStepRate &
& /integralKernelRate_
end if
varianceStepRate=+varianceProgenitorRateQuad(1) &
@@ -830,30 +827,18 @@ subroutine farahiMidpointRateTabulate(self,varianceProgenitor,time,node)
do j=1,i-1
offsetEffective =self%offsetEffective (self%timeRate(iTime),varianceCurrentRateQuad(iVariance),varianceProgenitorRateQuad(i),varianceMidpointRateQuad(j),barrier,effectiveBarrierInitial,barrierMidpointRateQuad(j)-barrier,cosmologicalMassVariance_)
varianceResidual =self%varianceResidual(self%timeRate(iTime),varianceCurrentRateQuad(iVariance),varianceProgenitorRateQuad(i),varianceMidpointRateQuad(j) ,cosmologicalMassVariance_)
- erfcArgumentNumerator =offsetEffective
- erfcArgumentDenominator=sqrt(2.0_kind_quad*varianceResidual)
- if (erfcArgumentNumerator == 0.0_kind_quad .or. exponent(erfcArgumentNumerator)-exponent(erfcArgumentDenominator) > maxExponent(0.0_kind_quad)) then
- erfcValue=1.0_kind_quad
- else
- erfcValue =Error_Function_Complementary(erfcArgumentNumerator/erfcArgumentDenominator)
- end if
- if (erfcValue > 0.0_kind_quad) then
- varianceStepRate =+varianceProgenitorRateQuad(j ) &
- & -varianceProgenitorRateQuad(j-1)
- probabilityCrossingPrior=+probabilityCrossingPrior &
- & +firstCrossingRateQuad (j ) &
- & *varianceStepRate &
- & *erfcValue
- end if
+ varianceStepRate =+varianceProgenitorRateQuad(j ) &
+ & -varianceProgenitorRateQuad(j-1)
+ probabilityCrossingPrior=+probabilityCrossingPrior &
+ & +firstCrossingRateQuad (j ) &
+ & *varianceStepRate &
+ & *integralKernelRate(varianceResidual,offsetEffective)
end do
varianceStepRate =+varianceProgenitorRateQuad(i ) &
& -varianceProgenitorRateQuad(i-1)
offsetEffective =+self%offsetEffective (self%timeRate(iTime),varianceCurrentRateQuad(iVariance),varianceProgenitorRateQuad(i),0.0_kind_quad,barrier,effectiveBarrierInitial,0.0_kind_quad,cosmologicalMassVariance_)
varianceResidual =+self%varianceResidual(self%timeRate(iTime),varianceCurrentRateQuad(iVariance),varianceProgenitorRateQuad(i),0.0_kind_quad ,cosmologicalMassVariance_)
- firstCrossingRateQuad(i)=+Error_Function_Complementary( &
- & +offsetEffective &
- & /sqrt(2.0_kind_quad*varianceResidual) &
- & ) &
+ firstCrossingRateQuad(i)=+integralKernelRate(varianceResidual,offsetEffective) &
& -probabilityCrossingPrior
if ( &
& firstCrossingRateQuad(i) > 0.0d0 &
@@ -1032,6 +1017,7 @@ function integralKernelRate(varianceResidual,offsetEffective)
implicit none
real(kind_quad) :: integralKernelRate
real(kind_quad), intent(in ) :: varianceResidual , offsetEffective
+ real(kind_quad) :: denominator
if (varianceResidual < 0.0_kind_quad) then
integralKernelRate=0.0_kind_quad
@@ -1040,13 +1026,18 @@ function integralKernelRate(varianceResidual,offsetEffective)
if (offsetEffective > 0.0_kind_quad) then
integralKernelRate=0.0_kind_quad
else
- integralKernelRate=1.0_kind_quad
+ integralKernelRate=2.0_kind_quad
end if
else
- integralKernelRate =Error_Function_Complementary( &
- & +offsetEffective &
- & /sqrt(2.0_kind_quad*varianceResidual) &
- & )
+ denominator=sqrt(2.0_kind_quad*varianceResidual)
+ if (offsetEffective == 0.0_kind_quad .or. exponent(offsetEffective)-exponent(denominator) > maxExponent(0.0_kind_quad)) then
+ integralKernelRate=1.0_kind_quad
+ else
+ integralKernelRate=Error_Function_Complementary( &
+ & +offsetEffective &
+ & /denominator &
+ & )
+ end if
end if
return
end function integralKernelRate
diff --git a/source/structure_formation.excursion_sets.first_crossing_distribution.linear_barrier.Brownian_bridge.F90 b/source/structure_formation.excursion_sets.first_crossing_distribution.linear_barrier.Brownian_bridge.F90
new file mode 100644
index 0000000000..38d44e5f19
--- /dev/null
+++ b/source/structure_formation.excursion_sets.first_crossing_distribution.linear_barrier.Brownian_bridge.F90
@@ -0,0 +1,495 @@
+!! Copyright 2009, 2010, 2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018,
+!! 2019, 2020, 2021, 2022, 2023
+!! Andrew Benson
+!!
+!! This file is part of Galacticus.
+!!
+!! Galacticus is free software: you can redistribute it and/or modify
+!! it under the terms of the GNU General Public License as published by
+!! the Free Software Foundation, either version 3 of the License, or
+!! (at your option) any later version.
+!!
+!! Galacticus is distributed in the hope that it will be useful,
+!! but WITHOUT ANY WARRANTY; without even the implied warranty of
+!! MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+!! GNU General Public License for more details.
+!!
+!! You should have received a copy of the GNU General Public License
+!! along with Galacticus. If not, see <https://www.gnu.org/licenses/>.
+
+!!{
+Contains a module which implements an excursion set first crossing statistics class for linear barriers with constrained branching
+described by a Brownian bridge solution.
+!!}
+
+ use :: Cosmology_Functions , only : cosmologyFunctionsClass
+ use :: Cosmological_Density_Field, only : cosmologicalMassVarianceClass
+ use :: Linear_Growth , only : linearGrowthClass
+ use :: Excursion_Sets_Barriers , only : excursionSetBarrierClass
+
+ !![
+
+
+ An excursion set first crossing statistics class for linear barriers and where the trajectories are constrained to follow a
+ Brownian bridge.
+
+ If we consider the Brownian bridge to originate from $(0,0)$ (i.e. we apply the usual shift of coordinates to move our
+ starting point to the origin), and to end at $(\delta_2,S_2)$ then we can transform this Brownian bridge into the standard
+ bridge with non-zero drift through the transformations:
+ \begin{eqnarray}
+ \tau &=& \frac{S}{S_2}, \\
+ b &=& \frac{\delta_2}{\sqrt{S_2}}.
+ \end{eqnarray}
+ To find the first crossing time distribution we then follow the general approach outlined by \cite{kiwiakos_answer_2014}, but
+ with an important difference that we will detail below.
+
+ The standard Brownian bridge (with no drift), $Y_0$, can be written in terms of a standard Wiener process, $W$, through a
+ change of variables
+ \begin{equation}
+ Y_0(t) = (1-t) W\left(\frac{t}{1-t}\right).
+ \end{equation}
+ The first crossing time distribution for our Brownian bridge can therefore be expressed as:
+ \begin{equation}
+ \tau_{Y}(B) = \mathrm{inf}\left\{ t : Y(t) = B(t)\right\} = \mathrm{inf}\left\{ t : \mu(t) + (1-t) W\left(\frac{t}{1-t}\right) = B(t) \right\} = \mathrm{inf}\left\{ t : W\left(\frac{t}{1-t}\right) = B(t) - \mu(t) \right\},
+ \end{equation}
+ where $B(t)$ is our barrier, and $\mu(t)$ is the drift term in the Brownian bridge.
+
+ As can be seen from the above, for the case of a linear barrier, a Brownian bridge with non-zero drift effectively results in
+ a new linear barrier equal to the original one minus the drift term, i.e.:
+ \begin{equation}
+ B^\prime(t) \rightarrow B(t) - \mu(t),
+ \end{equation}
+ where $B(t)$ is the barrier and $\mu(t)$ is the Brownian bridge drift term. This means that the first-crossing time of the
+ Brownian bridge is just the hitting time of this time-changed Wiener process. That is, if
+ \begin{equation}
+ \tau_W(B) = \mathrm{inf}\left\{ s : W(s) = B\right\},
+ \end{equation}
+ then
+ \begin{equation}
+ \frac{\tau_Y(B)}{1 - \tau_Y(B)} = \tau_W(B) \implies \tau_Y(B) = \frac{\tau_W(B)}{1+\tau_W(B)}.
+ \end{equation}
+ Here is where the solution presented by \cite{kiwiakos_answer_2014} is slightly wrong. We must use the first crossing time
+ solution for the standard Wiener process, but with a \emph{linear} barrier (because, even if the actual barrier is constant,
+ the effective barrier is linear due to the Brownian bridge drift term). Therefore (e.g. \citealt{zhang_random_2006}):
+ \begin{equation}
+ f_{W}(\tau_{W}) = B(0) \exp\left( - \frac{B(\tau_{W})^2}{2 \tau_{W}} \right) / \sqrt{2 \pi \tau_{W}^3}.
+ \end{equation}
+ We then have that
+ \begin{equation}
+ f_{Y}(\tau_{Y}) \mathrm{d}\tau_{Y} = f_{W}(\tau_{W}) \mathrm{d}\tau_{W},
+ \end{equation}
+ such that
+ \begin{equation}
+ f_{Y}(\tau_{Y}) = \frac{B(0)}{\sqrt{2 \pi \tau_{Y}^3 (1 - \tau_{Y})}} \exp\left( - \frac{B^{\prime 2}(\tau_{Y})}{2 \tau_{Y} (1-\tau_{Y})} \right),
+ \end{equation}
+ or, expressed in our usual variables
+ \begin{equation}
+ f(S) = \frac{B(0)}{\sqrt{2 \pi S^3 (1 - S/S_2)}} \exp\left( - \frac{[B(S)-\mu(S)]^2}{2 S (1-S/S_2)} \right).
+ \end{equation}
+
+
+ !!]
+ type, extends(excursionSetFirstCrossingClass) :: excursionSetFirstCrossingLinearBarrierBrownianBridge
+ !!{
+ A linearBarrierBrownianBridge excursion set first crossing statistics class.
+ !!}
+ private
+ class (cosmologyFunctionsClass ), pointer :: cosmologyFunctions_ => null()
+ class (excursionSetBarrierClass ), pointer :: excursionSetBarrier_ => null()
+ class (excursionSetFirstCrossingClass), pointer :: excursionSetFirstCrossing_ => null()
+ class (cosmologicalMassVarianceClass ), pointer :: cosmologicalMassVariance_ => null()
+ class (linearGrowthClass ), pointer :: linearGrowth_ => null()
+ class (criticalOverdensityClass ), pointer :: criticalOverdensity_ => null()
+ double precision :: criticalOverdensityConstrained , varianceConstrained, &
+ & timeConstrained , massConstrained , &
+ & redshiftConstrained
+ ! The fractional step in time used to compute barrier crossing rates.
+ double precision :: fractionalTimeStep
+ contains
+ final :: linearBarrierBrownianBridgeDestructor
+ procedure :: probability => linearBarrierBrownianBridgeProbability
+ procedure :: rate => linearBarrierBrownianBridgeRate
+ procedure :: rateNonCrossing => linearBarrierBrownianBridgeRateNonCrossing
+ end type excursionSetFirstCrossingLinearBarrierBrownianBridge
+
+ interface excursionSetFirstCrossingLinearBarrierBrownianBridge
+ !!{
+ Constructors for the linearBarrierBrownianBridge excursion set barrier class.
+ !!}
+ module procedure linearBarrierBrownianBridgeConstructorParameters
+ module procedure linearBarrierBrownianBridgeConstructorInternal
+ end interface excursionSetFirstCrossingLinearBarrierBrownianBridge
+
+contains
+
+ function linearBarrierBrownianBridgeConstructorParameters(parameters) result(self)
+ !!{
+ Constructor for the linear barrier excursion set class first crossing class which takes a parameter set as input.
+ !!}
+ use :: Input_Parameters, only : inputParameter, inputParameters
+ implicit none
+ type (excursionSetFirstCrossingLinearBarrierBrownianBridge) :: self
+ type (inputParameters ), intent(inout) :: parameters
+ class (cosmologyFunctionsClass ), pointer :: cosmologyFunctions_
+ class (excursionSetFirstCrossingClass ), pointer :: excursionSetFirstCrossing_
+ class (excursionSetBarrierClass ), pointer :: excursionSetBarrier_
+ class (cosmologicalMassVarianceClass ), pointer :: cosmologicalMassVariance_
+ class (linearGrowthClass ), pointer :: linearGrowth_
+ class (criticalOverdensityClass ), pointer :: criticalOverdensity_
+ double precision :: criticalOverdensityConstrained, varianceConstrained, &
+ & timeConstrained , massConstrained , &
+ & timePresent , redshiftConstrained, &
+ & expansionFactor , fractionalTimeStep
+
+ !![
+
+
+
+
+
+
+
+ fractionalTimeStep
+ 0.01d0
+
+ The fractional time step used when computing barrier crossing rates (i.e. the step used in finite difference calculations).
+
+ !!]
+ timePresent=cosmologyFunctions_%cosmicTime(expansionFactor=1.0d0)
+ if (parameters%isPresent('criticalOverdensityConstrained')) then
+ if ( &
+ & .not.parameters%isPresent('varianceConstrained') &
+ & ) call Error_Report('both "criticalOverdensityConstrained" and "varianceConstrained" must be provided' //{introspection:location})
+ if ( &
+ & parameters%isPresent('redshiftConstrained' ) &
+ & .or. &
+ & parameters%isPresent('massConstrained' ) &
+ & ) call Error_Report('can not mix "criticalOverdensityConstrained/varianceConstrained" and "redshiftConstrained/massConstrained" constraints'//{introspection:location})
+ !![
+
+ criticalOverdensityConstrained
+
+ The critical overdensity at the end of the Brownian bridge.
+
+
+ varianceConstrained
+
+ The variance at the end of the Brownian bridge.
+
+ !!]
+ massConstrained=cosmologicalMassVariance_%mass (time =timePresent ,rootVariance=sqrt(varianceConstrained))
+ timeConstrained=criticalOverdensity_ %timeOfCollapse(criticalOverdensity=criticalOverdensityConstrained,mass = massConstrained )
+ else if (parameters%isPresent('redshiftConstrained')) then
+ if ( &
+ & .not.parameters%isPresent('massConstrained' ) &
+ & ) call Error_Report('both "redshiftConstrained" and "massConstrained" must be provided' //{introspection:location})
+ if ( &
+ & parameters%isPresent('criticalOverdensityConstrained') &
+ & .or. &
+ & parameters%isPresent('varianceConstrained' ) &
+ & ) call Error_Report('can not mix "criticalOverdensityConstrained/varianceConstrained" and "redshiftConstrained/massConstrained" constraints'//{introspection:location})
+ !![
+
+ redshiftConstrained
+
+ The redshift at the end of the Brownian bridge.
+
+
+ massConstrained
+
+ The halo mass at the end of the Brownian bridge.
+
+ !!]
+ expansionFactor =+cosmologyFunctions_ %expansionFactorFromRedshift(redshift =redshiftConstrained )
+ timeConstrained =+cosmologyFunctions_ %cosmicTime (expansionFactor=expansionFactor )
+ criticalOverdensityConstrained=+criticalOverdensity_ %value (time =timeConstrained,mass=massConstrained) &
+ & /linearGrowth_ %value (time =timeConstrained )
+ varianceConstrained =+cosmologicalMassVariance_%rootVariance (time =timePresent ,mass=massConstrained)**2
+ else
+ criticalOverdensityConstrained=0.0d0
+ varianceConstrained =0.0d0
+ timeConstrained =0.0d0
+ massConstrained =0.0d0
+ call Error_Report('must provide either [criticalOverdensityConstrained] and [varianceConstrained], or [redshiftConstrained] and [massConstrained]')
+ end if
+ self=excursionSetFirstCrossingLinearBarrierBrownianBridge(varianceConstrained,criticalOverdensityConstrained,fractionalTimeStep,cosmologyFunctions_,excursionSetFirstCrossing_,excursionSetBarrier_,cosmologicalMassVariance_,criticalOverdensity_,linearGrowth_)
+ !![
+
+
+
+
+
+
+ !!]
+ return
+ end function linearBarrierBrownianBridgeConstructorParameters
+
+ function linearBarrierBrownianBridgeConstructorInternal(varianceConstrained,criticalOverdensityConstrained,fractionalTimeStep,cosmologyFunctions_,excursionSetFirstCrossing_,excursionSetBarrier_,cosmologicalMassVariance_,criticalOverdensity_,linearGrowth_) result(self)
+ !!{
+ Internal constructor for the linearBarrierBrownianBridge excursion set first crossing class.
+ !!}
+ implicit none
+ type (excursionSetFirstCrossingLinearBarrierBrownianBridge) :: self
+ double precision , intent(in ) :: varianceConstrained , criticalOverdensityConstrained, &
+ & fractionalTimeStep
+ class (cosmologyFunctionsClass ), intent(in ), target :: cosmologyFunctions_
+ class (excursionSetFirstCrossingClass ), intent(in ), target :: excursionSetFirstCrossing_
+ class (excursionSetBarrierClass ), intent(in ), target :: excursionSetBarrier_
+ class (cosmologicalMassVarianceClass ), intent(in ), target :: cosmologicalMassVariance_
+ class (linearGrowthClass ), intent(in ), target :: linearGrowth_
+ class (criticalOverdensityClass ), intent(in ), target :: criticalOverdensity_
+ double precision :: timePresent , expansionFactor
+ !![
+
+ !!]
+
+ ! Find mass and time corresponding to the constraint point.
+ timePresent =self%cosmologyFunctions_ %cosmicTime (expansionFactor =1.0d0 )
+ self%massConstrained =self%cosmologicalMassVariance_%mass (time =timePresent ,rootVariance=sqrt(self%varianceConstrained))
+ self%timeConstrained =self%criticalOverdensity_ %timeOfCollapse (criticalOverdensity=self%criticalOverdensityConstrained,mass = self%massConstrained )
+ expansionFactor =self%cosmologyFunctions_ %expansionFactor (time =self%timeConstrained )
+ self%redshiftConstrained=self%cosmologyFunctions_ %redshiftFromExpansionFactor(expansionFactor =expansionFactor )
+ return
+ end function linearBarrierBrownianBridgeConstructorInternal
+
+ subroutine linearBarrierBrownianBridgeDestructor(self)
+ !!{
+ Destructor for the linearBarrierBrownianBridge excursion set first crossing class.
+ !!}
+ implicit none
+ type(excursionSetFirstCrossingLinearBarrierBrownianBridge), intent(inout) :: self
+
+ !![
+
+
+
+
+
+
+ !!]
+ return
+ end subroutine linearBarrierBrownianBridgeDestructor
+
+ double precision function linearBarrierBrownianBridgeProbability(self,variance,time,node) result(probability)
+ !!{
+ Return the excursion set first crossing probability at the given variance and time.
+ !!}
+ use :: Numerical_Constants_Math, only : Pi
+ implicit none
+ class (excursionSetFirstCrossingLinearBarrierBrownianBridge), intent(inout) :: self
+ double precision , intent(in ) :: variance , time
+ type (treeNode ), intent(inout) :: node
+ double precision :: varianceConstrained, criticalOverdensityConstrained, &
+ & barrierConstrained , rootVarianceConstrained
+
+ ! Determine effective constraint point at this epoch.
+ rootVarianceConstrained =+self%cosmologicalMassVariance_%rootVariance(self%massConstrained,time)
+ varianceConstrained =+rootVarianceConstrained**2
+ criticalOverdensityConstrained=+self%criticalOverdensityConstrained &
+ & *sqrt( &
+ & + varianceConstrained &
+ & /self%varianceConstrained &
+ & )
+ ! Determine whether to use the conditioned or unconditioned solutions.
+ if (self%excursionSetBarrier_%barrier(variance,time,node,rateCompute=.true.) > criticalOverdensityConstrained) then
+ ! The time corresponds to a barrier above the constrained point. Therefore we want the unconstrained solution.
+ probability =+self%excursionSetFirstCrossing_%probability(variance,time,node)
+ else if ( variance >= varianceConstrained ) then
+ ! For variances in excess of the constrained variance the first crossing probability must be zero.
+ probability =+0.0d0
+ else
+ barrierConstrained=+( &
+ & + self%excursionSetBarrier_%barrier (variance,time,node,rateCompute=.false.) &
+ & -( &
+ & + criticalOverdensityConstrained &
+ & -self%excursionSetBarrier_%barrier (variance,time,node,rateCompute=.false.) &
+ & ) &
+ & *variance &
+ & /varianceConstrained &
+ & ) &
+ & /( &
+ & +1.0d0 &
+ & -variance &
+ & /varianceConstrained &
+ & )
+ probability =+ self%excursionSetBarrier_%barrier( 0.0d0,time,node,rateCompute=.false.) &
+ & *exp( &
+ & -0.5d0 &
+ & *( &
+ & +self%excursionSetBarrier_%barrier (variance,time,node,rateCompute=.false.) &
+ & - criticalOverdensityConstrained &
+ & *variance &
+ & /varianceConstrained &
+ & )**2 &
+ & *( &
+ & +1.0d0 &
+ & -variance &
+ & /varianceConstrained &
+ & ) &
+ & / variance &
+ & ) &
+ & /sqrt( &
+ & +2.0d0 &
+ & *Pi &
+ & * variance **3 &
+ & *( &
+ & +1.0d0 &
+ & -variance &
+ & /varianceConstrained &
+ & ) &
+ & )
+ end if
+ return
+ end function linearBarrierBrownianBridgeProbability
+
+ double precision function linearBarrierBrownianBridgeRate(self,variance,varianceProgenitor,time,node) result(rate)
+ !!{
+ Return the excursion set first crossing rate at the given variance and time.
+ !!}
+ use :: Numerical_Constants_Math, only : Pi
+ implicit none
+ class (excursionSetFirstCrossingLinearBarrierBrownianBridge), intent(inout) :: self
+ double precision , intent(in ) :: variance , varianceProgenitor , &
+ & time
+ type (treeNode ), intent(inout) :: node
+ double precision :: timeProgenitor , massProgenitor , &
+ & growthFactorEffective , varianceConstrained , &
+ & criticalOverdensityConstrained, varianceDifference , &
+ & varianceConstrainedDifference , rootVarianceConstrained
+
+ ! Determine effective constraint point at this epoch.
+ rootVarianceConstrained =+self%cosmologicalMassVariance_%rootVariance(self%massConstrained,time)
+ varianceConstrained =+rootVarianceConstrained**2
+ criticalOverdensityConstrained=+self%criticalOverdensityConstrained &
+ & *sqrt( &
+ & + varianceConstrained &
+ & /self%varianceConstrained &
+ & )
+ ! Determine whether to use the conditioned or unconditioned solutions.
+ if (self%excursionSetBarrier_%barrier(varianceProgenitor,time,node,rateCompute=.true.) > criticalOverdensityConstrained) then
+ ! The time corresponds to a barrier above the constrained point. Therefore we want the unconstrained solution.
+ rate=self%excursionSetFirstCrossing_%rate (variance,varianceProgenitor,time,node)
+ else if ( varianceProgenitor >= varianceConstrained ) then
+ ! For progenitor variances in excess of the constrained variance the first crossing rate must be zero.
+ rate=0.0d0
+ else
+ ! * To estimate the rate we use a finite difference method - we compute the effective barrier for a slightly earlier time,
+ ! compute the fraction of trajectories which will have crossed that effective barrier, and divide by the time difference.
+ !
+ ! * In Galacticus, the time evolution due to linear growth is included in the root-variance of the density field, *not* in
+ ! the barrier height as is often done in the literature. As such, when computing the barrier at some earlier time we must
+ ! account for the fact that, at a fixed mass, the root variance will be smaller at that earlier time. Since the solution
+ ! to the excursion set problem must always be a function of δc(M,t)/√S(M,t) then we can simply scale δc by the ratio of
+ ! root-variances for the progenitor at the current and earlier times.
+ timeProgenitor =+time*(1.0d0-self%fractionalTimeStep)
+ massProgenitor =+self%cosmologicalMassVariance_%mass (sqrt(varianceProgenitor),time )
+ growthFactorEffective =+self%cosmologicalMassVariance_%rootVariance( massProgenitor ,time ) &
+ & /self%cosmologicalMassVariance_%rootVariance( massProgenitor ,timeProgenitor)
+ varianceDifference =+varianceProgenitor -variance
+ varianceConstrainedDifference=+varianceConstrained-variance
+ rate =+ barrierEffective (variance,time,variance ,timeProgenitor) &
+ & *exp( &
+ & -0.5d0 &
+ & *barrierEffectiveConstrained(variance,time,varianceProgenitor,timeProgenitor)**2 &
+ & *( &
+ & +1.0d0 &
+ & -varianceDifference &
+ & /varianceConstrainedDifference &
+ & ) &
+ & / varianceDifference &
+ & ) &
+ & /sqrt( &
+ & +2.0d0 &
+ & *Pi &
+ & * varianceDifference **3 &
+ & *( &
+ & +1.0d0 &
+ & -varianceDifference &
+ & /varianceConstrainedDifference &
+ & ) &
+ & ) &
+ & /time &
+ & /self%fractionalTimeStep
+ rate =max( &
+ & rate , &
+ & 0.0d0 &
+ & )
+ end if
+ return
+
+ contains
+
+ double precision function barrierEffective(variance0,time0,variance1,time1)
+ !!{
+ The effective barrier for conditional excursion sets.
+ !!}
+ implicit none
+ double precision, intent(in ) :: time1 , time0 , &
+ & variance1, variance0
+
+ barrierEffective=+self%excursionSetBarrier_%barrier(variance1,time1,node,rateCompute=.false.)*growthFactorEffective &
+ & -self%excursionSetBarrier_%barrier(variance0,time0,node,rateCompute=.false.)
+ return
+ end function barrierEffective
+
+ double precision function barrierEffectiveConstrained(variance0,time0,variance1,time1)
+ !!{
+ The effective barrier for conditional excursion sets.
+ !!}
+ implicit none
+ double precision, intent(in ) :: time1 , time0 , &
+ & variance1, variance0
+
+ barrierEffectiveConstrained=+( &
+ & +( &
+ & +self%excursionSetBarrier_%barrier (variance1,time1,node,rateCompute=.false.)*growthFactorEffective &
+ & -self%excursionSetBarrier_%barrier (variance0,time0,node,rateCompute=.false.) &
+ & ) &
+ & -( &
+ & + criticalOverdensityConstrained &
+ & -self%excursionSetBarrier_%barrier (variance0,time0,node,rateCompute=.false.) &
+ & ) &
+ & *varianceDifference &
+ & /varianceConstrainedDifference &
+ & ) &
+ & /( &
+ & +1.0d0 &
+ & -varianceDifference &
+ & /varianceConstrainedDifference &
+ & )
+ return
+ end function barrierEffectiveConstrained
+
+ end function linearBarrierBrownianBridgeRate
+
+ double precision function linearBarrierBrownianBridgeRateNonCrossing(self,variance,massMinimum,time,node) result(rateNonCrossing)
+ !!{
+ Return the rate for excursion set non-crossing assuming a linear barrier.
+ !!}
+ implicit none
+ class (excursionSetFirstCrossingLinearBarrierBrownianBridge), intent(inout) :: self
+ double precision , intent(in ) :: time , variance , &
+ & massMinimum
+ type (treeNode ), intent(inout) :: node
+ double precision :: rootVarianceConstrained , varianceConstrained, &
+ & criticalOverdensityConstrained
+
+ ! Determine effective constraint point at this epoch.
+ rootVarianceConstrained =+self%cosmologicalMassVariance_%rootVariance(self%massConstrained,time)
+ varianceConstrained =+rootVarianceConstrained**2
+ criticalOverdensityConstrained=+self%criticalOverdensityConstrained &
+ & *sqrt( &
+ & + varianceConstrained &
+ & /self%varianceConstrained &
+ & )
+ ! Determine whether to use the conditioned or unconditioned solutions.
+ if (self%excursionSetBarrier_%barrier(variance,time,node,rateCompute=.true.) > criticalOverdensityConstrained) then
+ ! The time corresponds to a barrier above the constrained point. Therefore we want the unconstrained solution.
+ rateNonCrossing=self%excursionSetFirstCrossing_%rateNonCrossing(variance,massMinimum,time,node)
+ else
+ ! Use the constrained solution. By definition, all trajectories cross the barrier before the constrained point. Therefore,
+ ! the non-crossing rate is always zero.
+ rateNonCrossing=0.0d0
+ end if
+ return
+ end function linearBarrierBrownianBridgeRateNonCrossing
diff --git a/source/tasks.evolve_forests.F90 b/source/tasks.evolve_forests.F90
index 22d5064592..0f56e7f51d 100644
--- a/source/tasks.evolve_forests.F90
+++ b/source/tasks.evolve_forests.F90
@@ -513,8 +513,6 @@ subroutine evolveForestsPerform(self,status)
!$omp barrier
! Begin processing trees.
treeProcess : do while (.not.finished)
- ! Report on memory utilization.
- call reportMemoryUsage()
! Attempt to get a new tree to process. We first try to get a new tree. If no new trees exist, we will look for a tree on
! the stack waiting to be processed.
! Perform any pre-tree construction tasks.
@@ -548,6 +546,8 @@ subroutine evolveForestsPerform(self,status)
treeIsNew=.false.
finished =.not.associated(tree)
end if
+ ! Report on memory utilization.
+ call reportMemoryUsage()
! If we got a tree (i.e. we are not "finished") process it.
if (.not.finished) then
treeIsFinished=.false.
@@ -703,6 +703,8 @@ subroutine evolveForestsPerform(self,status)
! until that event's task is performed.
exit
else
+ ! Report on memory utilization.
+ call reportMemoryUsage()
! Tree reached an output time, so output it. We can then continue evolving.
write (label,'(f7.2)') evolveToTime
message="Output tree data at t="//trim(label)//" Gyr"
diff --git a/source/tests.merger_tree_branching.F90 b/source/tests.merger_tree_branching.F90
index 726643f02a..16924f5ad1 100644
--- a/source/tests.merger_tree_branching.F90
+++ b/source/tests.merger_tree_branching.F90
@@ -28,46 +28,58 @@ program Tests_Merger_Tree_Branching
use :: Display , only : displayVerbositySet , verbosityLevelWorking
use :: Events_Hooks , only : eventsHooksInitialize
use :: Excursion_Sets_Barriers , only : excursionSetBarrierCriticalOverdensity
- use :: Excursion_Sets_First_Crossings , only : excursionSetFirstCrossingFarahiMidpoint , excursionSetFirstCrossingLinearBarrier
+ use :: Excursion_Sets_First_Crossings , only : excursionSetFirstCrossingFarahiMidpoint , excursionSetFirstCrossingFarahiMidpointBrownianBridge , excursionSetFirstCrossingLinearBarrier, excursionSetFirstCrossingLinearBarrierBrownianBridge
use :: Galacticus_Nodes , only : treeNode
use :: ISO_Varying_String , only : var_str
use :: Linear_Growth , only : linearGrowthCollisionlessMatter
use :: Merger_Tree_Branching , only : mergerTreeBranchingProbabilityGnrlzdPrssSchchtr, mergerTreeBranchingProbabilityParkinsonColeHelly
- use :: Merger_Tree_Branching_Modifiers , only : mergerTreeBranchingProbabilityModifierIdentity
+ use :: Merger_Tree_Branching_Modifiers , only : mergerTreeBranchingProbabilityModifierIdentity , mergerTreeBranchingProbabilityModifierPCHPlus
+ use :: Numerical_Constants_Math , only : Pi
use :: Power_Spectra_Primordial , only : powerSpectrumPrimordialPowerLaw
use :: Power_Spectra_Primordial_Transferred, only : powerSpectrumPrimordialTransferredSimple
use :: Power_Spectrum_Window_Functions , only : powerSpectrumWindowFunctionSharpKSpace
use :: Transfer_Functions , only : transferFunctionIdentity
- use :: Unit_Tests , only : Assert , Unit_Tests_Begin_Group , Unit_Tests_End_Group, Unit_Tests_Finish
+ use :: Unit_Tests , only : Assert , Unit_Tests_Begin_Group , Unit_Tests_End_Group , Unit_Tests_Finish , &
+ & compareLessThan
implicit none
- type (cosmologyParametersSimple ) :: cosmologyParametersSimple_
- type (cosmologyFunctionsMatterLambda ) :: cosmologyFunctionsMatterLambda_
- type (cosmologicalMassVarianceFilteredPower ) :: cosmologicalMassVarianceFilteredPower_
- type (powerSpectrumWindowFunctionSharpKSpace ) :: powerSpectrumWindowFunctionSharpKSpace_
- type (powerSpectrumPrimordialPowerLaw ) :: powerSpectrumPrimordialPowerLaw_
- type (transferFunctionIdentity ) :: transferFunctionIdentity_
- type (powerSpectrumPrimordialTransferredSimple ) :: powerSpectrumPrimordialTransferredSimple_
- type (linearGrowthCollisionlessMatter ) :: linearGrowthCollisionlessMatter_
- type (excursionSetFirstCrossingLinearBarrier ) :: excursionSetFirstCrossingLinearBarrier_
- type (excursionSetFirstCrossingFarahiMidpoint ) :: excursionSetFirstCrossingFarahiMidpoint_
- type (excursionSetBarrierCriticalOverdensity ) :: excursionSetBarrierCriticalOverdensity_
- type (darkMatterParticleCDM ) :: darkMatterParticleCDM_
- type (treeNode ) :: node
- type (mergerTreeBranchingProbabilityModifierIdentity ) :: mergerTreeBranchingProbabilityModifierIdentity_
- type (criticalOverdensitySphericalCollapseClsnlssMttrCsmlgclCnstnt) :: criticalOverdensitySphericalCollapseClsnlssMttrCsmlgclCnstnt_
- type (mergerTreeBranchingProbabilityParkinsonColeHelly ), dimension(3) :: mergerTreeBranchingProbabilityParkinsonColeHelly_
- type (mergerTreeBranchingProbabilityGnrlzdPrssSchchtr ), dimension(5) :: mergerTreeBranchingProbabilityGnrlzdPrssSchchtr_
- double precision :: time , rootVarianceParent , &
- & rootVarianceResolution , branchingProbabilityRate , &
- & accretionRate , criticalOverdensity_ , &
- & expansionFactor , timeNow , &
- & branchingProbabilityRateTargetGeneral , accretionRateTargetGeneral , &
- & smoothAccretionRateTargetGeneral , smoothAccretionRate
- double precision , parameter :: branchingProbabilityRateTarget =2.498324530964044d0, accretionRateTarget =4.181139013841312d-1
- double precision , dimension(2) :: redshift =[0.0d0,1.0d0]
- double precision , parameter :: massParent =1.0d12 , massResolution =1.0d11
- integer :: i
- character (len=16 ) :: label
+ type (cosmologyParametersSimple ) :: cosmologyParametersSimple_
+ type (cosmologyFunctionsMatterLambda ) :: cosmologyFunctionsMatterLambda_
+ type (cosmologicalMassVarianceFilteredPower ) :: cosmologicalMassVarianceFilteredPower_
+ type (powerSpectrumWindowFunctionSharpKSpace ) :: powerSpectrumWindowFunctionSharpKSpace_
+ type (powerSpectrumPrimordialPowerLaw ) :: powerSpectrumPrimordialPowerLaw_
+ type (transferFunctionIdentity ) :: transferFunctionIdentity_
+ type (powerSpectrumPrimordialTransferredSimple ) :: powerSpectrumPrimordialTransferredSimple_
+ type (linearGrowthCollisionlessMatter ) :: linearGrowthCollisionlessMatter_
+ type (excursionSetFirstCrossingLinearBarrier ) :: excursionSetFirstCrossingLinearBarrier_
+ type (excursionSetFirstCrossingLinearBarrierBrownianBridge ) :: excursionSetFirstCrossingLinearBarrierBrownianBridge_
+ type (excursionSetFirstCrossingFarahiMidpoint ) :: excursionSetFirstCrossingFarahiMidpoint_
+ type (excursionSetFirstCrossingFarahiMidpointBrownianBridge ) :: excursionSetFirstCrossingFarahiMidpointBrownianBridge_
+ type (excursionSetBarrierCriticalOverdensity ) :: excursionSetBarrierCriticalOverdensity_
+ type (darkMatterParticleCDM ) :: darkMatterParticleCDM_
+ type (treeNode ) :: node
+ type (mergerTreeBranchingProbabilityModifierIdentity ) :: mergerTreeBranchingProbabilityModifierIdentity_
+ type (criticalOverdensitySphericalCollapseClsnlssMttrCsmlgclCnstnt) :: criticalOverdensitySphericalCollapseClsnlssMttrCsmlgclCnstnt_
+ type (mergerTreeBranchingProbabilityParkinsonColeHelly ), dimension( 3) :: mergerTreeBranchingProbabilityParkinsonColeHelly_
+ type (mergerTreeBranchingProbabilityGnrlzdPrssSchchtr ), dimension( 5) :: mergerTreeBranchingProbabilityGnrlzdPrssSchchtr_
+ double precision :: time , rootVarianceParent , &
+ & rootVarianceResolution , branchingProbabilityRate , &
+ & accretionRate , criticalOverdensity_ , &
+ & expansionFactor , timeNow , &
+ & branchingProbabilityRateTargetGeneral , accretionRateTargetGeneral , &
+ & smoothAccretionRateTargetGeneral , smoothAccretionRate , &
+ & varianceParent , varianceResolution , &
+ & errorMaximumUnconstrained , errorMaximumConstrained
+ double precision , parameter :: branchingProbabilityRateTarget =2.498324530964044d0, accretionRateTarget =4.181139013841312d-1
+ double precision , dimension( 2) :: redshift =[0.0d0,1.0d0]
+ double precision , parameter :: massParent =1.0d12 , massResolution =1.0d11 , &
+ & fractionalTimeStep =1.0d-3 , varianceConstrained =2.0d02 , &
+ & criticalOverdensityConstrained =4.0d00 , toleranceVariance =4.0d-2
+ integer , parameter :: countVariance =1000
+ double precision , dimension(countVariance) :: branchingRateUnconstrainedAnalytic , branchingRateUnconstrainedNumerical , &
+ & branchingRateConstrainedAnalytic , branchingRateConstrainedNumerical , &
+ & variance_
+ integer :: i , j
+ character (len=16 ) :: label
! Set verbosity level.
call displayVerbositySet(verbosityLevelWorking)
@@ -124,13 +136,15 @@ program Tests_Merger_Tree_Branching
& powerSpectrumPrimordialTransferred_ =powerSpectrumPrimordialTransferredSimple_ , &
& powerSpectrumWindowFunction_ =powerSpectrumWindowFunctionSharpKSpace_ &
& )
- criticalOverdensitySphericalCollapseClsnlssMttrCsmlgclCnstnt_=criticalOverdensitySphericalCollapseClsnlssMttrCsmlgclCnstnt( &
+ criticalOverdensitySphericalCollapseClsnlssMttrCsmlgclCnstnt_=criticalOverdensitySphericalCollapseClsnlssMttrCsmlgclCnstnt ( &
& linearGrowth_ =linearGrowthCollisionlessMatter_ , &
& cosmologyFunctions_ =cosmologyFunctionsMatterLambda_ , &
& cosmologicalMassVariance_ =cosmologicalMassVarianceFilteredPower_ , &
& darkMatterParticle_ =darkMatterParticleCDM_ , &
& tableStore =.true. &
& )
+ mergerTreeBranchingProbabilityModifierIdentity_ =mergerTreeBranchingProbabilityModifierIdentity ( &
+ & )
mergerTreeBranchingProbabilityParkinsonColeHelly_(1) =mergerTreeBranchingProbabilityParkinsonColeHelly ( &
& G0 =1.0d+0 , &
& gamma1 =0.0d+0 , &
@@ -172,12 +186,23 @@ program Tests_Merger_Tree_Branching
& cosmologicalMassVariance_ =cosmologicalMassVarianceFilteredPower_ &
& )
excursionSetFirstCrossingLinearBarrier_ =excursionSetFirstCrossingLinearBarrier ( &
- & fractionalTimeStep =0.001d0 , &
+ & fractionalTimeStep =fractionalTimeStep , &
& excursionSetBarrier_ =excursionSetBarrierCriticalOverdensity_ , &
& cosmologicalMassVariance_ =cosmologicalMassVarianceFilteredPower_ &
& )
+ excursionSetFirstCrossingLinearBarrierBrownianBridge_ =excursionSetFirstCrossingLinearBarrierBrownianBridge ( &
+ & varianceConstrained =varianceConstrained , &
+ & criticalOverdensityConstrained =criticalOverdensityConstrained , &
+ & fractionalTimeStep =fractionalTimeStep , &
+ & cosmologyFunctions_ =cosmologyFunctionsMatterLambda_ , &
+ & excursionSetFirstCrossing_ =excursionSetFirstCrossingLinearBarrier_ , &
+ & excursionSetBarrier_ =excursionSetBarrierCriticalOverdensity_ , &
+ & cosmologicalMassVariance_ =cosmologicalMassVarianceFilteredPower_ , &
+ & criticalOverdensity_ =criticalOverdensitySphericalCollapseClsnlssMttrCsmlgclCnstnt_, &
+ & linearGrowth_ =linearGrowthCollisionlessMatter_ &
+ & )
excursionSetFirstCrossingFarahiMidpoint_ =excursionSetFirstCrossingFarahiMidpoint ( &
- & fractionalTimeStep =0.001d0 , &
+ & fractionalTimeStep =fractionalTimeStep , &
& fileName =var_str("auto") , &
& varianceNumberPerUnitProbability =100 , &
& varianceNumberPerUnit = 16 , &
@@ -189,11 +214,30 @@ program Tests_Merger_Tree_Branching
& excursionSetBarrier_ =excursionSetBarrierCriticalOverdensity_ , &
& cosmologicalMassVariance_ =cosmologicalMassVarianceFilteredPower_ &
& )
+ excursionSetFirstCrossingFarahiMidpointBrownianBridge_ =excursionSetFirstCrossingFarahiMidpointBrownianBridge ( &
+ & varianceConstrained =varianceConstrained , &
+ & criticalOverdensityConstrained = criticalOverdensityConstrained , &
+ & fractionalTimeStep =fractionalTimeStep , &
+ & fileName =var_str("auto") , &
+ & varianceNumberPerUnitProbability =100 , &
+ & varianceNumberPerUnit = 32 , &
+ & varianceNumberPerDecade = 32 , &
+ & varianceNumberPerDecadeNonCrossing = 8 , &
+ & timeNumberPerDecade = 64 , &
+ & varianceIsUnlimited =.false. , &
+ & cosmologyFunctions_ =cosmologyFunctionsMatterLambda_ , &
+ & excursionSetBarrier_ =excursionSetBarrierCriticalOverdensity_ , &
+ & cosmologicalMassVariance_ =cosmologicalMassVarianceFilteredPower_ , &
+ & criticalOverdensity_ =criticalOverdensitySphericalCollapseClsnlssMttrCsmlgclCnstnt_, &
+ & linearGrowth_ =linearGrowthCollisionlessMatter_ , &
+ & excursionSetFirstCrossing_ =excursionSetFirstCrossingLinearBarrier_ &
+ & )
mergerTreeBranchingProbabilityGnrlzdPrssSchchtr_(1) =mergerTreeBranchingProbabilityGnrlzdPrssSchchtr ( &
& deltaStepMaximum =1.0d-1 , &
& massMinimum =1.0d+0 , &
& smoothAccretion =.false. , &
& distributionFunctionLowerHalfOnly =.true. , &
+ & distributionFunctionNormalize =.true. , &
& cosmologyFunctions_ =cosmologyFunctionsMatterLambda_ , &
& criticalOverdensity_ =criticalOverdensitySphericalCollapseClsnlssMttrCsmlgclCnstnt_, &
& cosmologicalMassVariance_ =cosmologicalMassVarianceFilteredPower_ , &
@@ -205,6 +249,7 @@ program Tests_Merger_Tree_Branching
& massMinimum =1.0d-1*massResolution , &
& smoothAccretion =.false. , &
& distributionFunctionLowerHalfOnly =.true. , &
+ & distributionFunctionNormalize =.true. , &
& cosmologyFunctions_ =cosmologyFunctionsMatterLambda_ , &
& criticalOverdensity_ =criticalOverdensitySphericalCollapseClsnlssMttrCsmlgclCnstnt_, &
& cosmologicalMassVariance_ =cosmologicalMassVarianceFilteredPower_ , &
@@ -216,6 +261,7 @@ program Tests_Merger_Tree_Branching
& massMinimum =1.0d-1*massResolution , &
& smoothAccretion =.false. , &
& distributionFunctionLowerHalfOnly =.true. , &
+ & distributionFunctionNormalize =.true. , &
& cosmologyFunctions_ =cosmologyFunctionsMatterLambda_ , &
& criticalOverdensity_ =criticalOverdensitySphericalCollapseClsnlssMttrCsmlgclCnstnt_, &
& cosmologicalMassVariance_ =cosmologicalMassVarianceFilteredPower_ , &
@@ -227,6 +273,7 @@ program Tests_Merger_Tree_Branching
& massMinimum =1.0d-1*massResolution , &
& smoothAccretion =.true. , &
& distributionFunctionLowerHalfOnly =.true. , &
+ & distributionFunctionNormalize =.true. , &
& cosmologyFunctions_ =cosmologyFunctionsMatterLambda_ , &
& criticalOverdensity_ =criticalOverdensitySphericalCollapseClsnlssMttrCsmlgclCnstnt_, &
& cosmologicalMassVariance_ =cosmologicalMassVarianceFilteredPower_ , &
@@ -238,6 +285,7 @@ program Tests_Merger_Tree_Branching
& massMinimum =1.0d-1*massResolution , &
& smoothAccretion =.true. , &
& distributionFunctionLowerHalfOnly =.true. , &
+ & distributionFunctionNormalize =.true. , &
& cosmologyFunctions_ =cosmologyFunctionsMatterLambda_ , &
& criticalOverdensity_ =criticalOverdensitySphericalCollapseClsnlssMttrCsmlgclCnstnt_, &
& cosmologicalMassVariance_ =cosmologicalMassVarianceFilteredPower_ , &
@@ -305,6 +353,37 @@ program Tests_Merger_Tree_Branching
call Assert('Smooth accretion rate' ,smoothAccretionRate ,smoothAccretionRateTargetGeneral ,relTol=2.5d-2)
call Unit_Tests_End_Group ( )
call Unit_Tests_End_Group ( )
+ call Unit_Tests_Begin_Group("First crossing distribution (numerical)" )
+ write (400,*) "variance , uncon_num, con_num, uncon_alytc, con_alytc"
+ varianceParent =cosmologicalMassVarianceFilteredPower_%rootVariance(massParent ,time)**2
+ varianceResolution=cosmologicalMassVarianceFilteredPower_%rootVariance(massResolution,time)**2
+ do j=1,countVariance
+ variance_ (j)=+varianceResolution &
+ & +( &
+ & +varianceParent &
+ & -varianceResolution &
+ & ) &
+ & *dble(j -1) &
+ & /dble(countVariance )
+ branchingRateUnconstrainedAnalytic (j)=excursionSetFirstCrossingLinearBarrier_ %rate(varianceParent,variance_(j),time,node)
+ branchingRateUnconstrainedNumerical(j)=excursionSetFirstCrossingFarahiMidpoint_ %rate(varianceParent,variance_(j),time,node)
+ branchingRateConstrainedNumerical (j)=excursionSetFirstCrossingFarahiMidpointBrownianBridge_%rate(varianceParent,variance_(j),time,node)
+ branchingRateConstrainedAnalytic (j)=excursionSetFirstCrossingLinearBarrierBrownianBridge_ %rate(varianceParent,variance_(j),time,node)
+ write (400,*) variance_(j),",",branchingRateUnconstrainedNumerical(j),",",branchingRateConstrainedNumerical(j),",",branchingRateUnconstrainedAnalytic(j),",",branchingRateConstrainedAnalytic(j)
+ end do
+ errorMaximumUnconstrained=maxval( &
+ & abs(branchingRateUnconstrainedNumerical-branchingRateUnconstrainedAnalytic)/branchingRateUnconstrainedAnalytic &
+ & )
+ errorMaximumConstrained =maxval( &
+ & abs(branchingRateConstrainedNumerical -branchingRateConstrainedAnalytic )/branchingRateConstrainedAnalytic , &
+ & mask= variance_ < varianceConstrained*linearGrowthCollisionlessMatter_%value(time)**2*(1.0d0-toleranceVariance) &
+ & .and. &
+ & branchingRateConstrainedAnalytic > 0.0d0 &
+ & )
+ call Assert('Unconstrained case',errorMaximumUnconstrained,3.3d-2,compareLessThan)
+ call Assert('Constrained case' ,errorMaximumConstrained ,3.3d-2,compareLessThan)
+ call Unit_Tests_End_Group()
+ call Unit_Tests_End_Group()
end do
! End unit tests.
call Unit_Tests_End_Group()