Skip to content

Commit

Permalink
fix bias in mixing matrices
Browse files · Browse the repository at this point in the history
  • Loading branch information
ntessore committed Jan 18, 2024
1 parent 1dd590c commit e95686a
Show file tree
Hide file tree
Showing 2 changed files with 43 additions and 28 deletions.
2 changes: 1 addition & 1 deletion heracles/cli.py
Original file line number Diff line number Diff line change
Expand Up @@ -669,7 +669,7 @@ def mixmats(
all_alms,
all_alms2,
lmax=l3max,
debias=False,
debias=True,
include=include_masks,
)
# now compute the mixing matrices from these spectra
Expand Down
69 changes: 42 additions & 27 deletions heracles/fields.py
Original file line number Diff line number Diff line change
Expand Up @@ -320,7 +320,7 @@ async def __call__(

# total weighted variance from online algorithm
ngal = 0
wmean, w2mean, var = 0.0, 0.0, 0.0
wmean, var = 0.0, 0.0

# go through pages in catalogue and map values
async for page in _pages(catalog, progress):
Expand All @@ -338,7 +338,6 @@ async def __call__(
var += (v**2 - var).sum() / ngal
else:
wmean += (w - wmean).sum() / ngal
w2mean += (w**2 - w2mean).sum() / ngal
var += ((w * v) ** 2 - var).sum() / ngal

del lon, lat, v, w
Expand All @@ -348,7 +347,7 @@ async def __call__(

# fix mean weight if there was no column for it
if wcol is None:
wmean = w2mean = 1.0
wmean = 1.0

# compute mean visibility
if catalog.visibility is None:
Expand All @@ -365,11 +364,8 @@ async def __call__(
# compute bias from variance (per object)
bias = 4 * np.pi * vbar**2 * (var / wmean**2) / ngal

# bias correction factor for intrinsic variance
bcor = 4 * np.pi * vbar**2 * (w2mean / wmean**2) / ngal

# set metadata of array
update_metadata(val, self, catalog, mapper, wbar=wbar, bias=bias, bcor=bcor)
update_metadata(val, self, catalog, mapper, wbar=wbar, bias=bias)

# return the value map
return val
Expand Down Expand Up @@ -402,7 +398,7 @@ async def __call__(

# total weighted variance from online algorithm
ngal = 0
wmean, w2mean, var = 0.0, 0.0, 0.0
wmean, var = 0.0, 0.0

# go through pages in catalogue and get the shear values,
async for page in _pages(catalog, progress):
Expand All @@ -421,7 +417,6 @@ async def __call__(
var += (re**2 + im**2 - var).sum() / ngal
else:
wmean += (w - wmean).sum() / ngal
w2mean += (w**2 - w2mean).sum() / ngal
var += ((w * re) ** 2 + (w * im) ** 2 - var).sum() / ngal

del lon, lat, re, im, w
Expand All @@ -430,7 +425,7 @@ async def __call__(

# set mean weight if there was no column for it
if wcol is None:
wmean = w2mean = 1.0
wmean = 1.0

# compute mean visibility
if catalog.visibility is None:
Expand All @@ -447,11 +442,8 @@ async def __call__(
# bias from measured variance, for E/B decomposition
bias = 2 * np.pi * vbar**2 * (var / wmean**2) / ngal

# bias correction factor for intrinsic field variance
bcor = 2 * np.pi * vbar**2 * (w2mean / wmean**2) / ngal

# set metadata of array
update_metadata(val, self, catalog, mapper, wbar=wbar, bias=bias, bcor=bcor)
update_metadata(val, self, catalog, mapper, wbar=wbar, bias=bias)

# return the shear map
return val
Expand Down Expand Up @@ -513,29 +505,52 @@ async def __call__(
# weight map
wht = np.zeros(mapper.size, mapper.dtype)

# total weighted variance from online algorithm
ngal = 0
wmean, w2mean = 0.0, 0.0

# map catalogue
async for page in _pages(catalog, progress):
lon, lat = page.get(*col)
if wcol is not None:
page.delete(page[wcol] == 0)

if wcol is None:
w = None
else:
w = page.get(wcol)
if page.size:
lon, lat = page.get(*col)

w = page.get(wcol) if wcol is not None else None

mapper.map_values(lon, lat, [wht], None, w)

ngal += page.size
if w is not None:
wmean += (w - wmean).sum() / ngal
w2mean += (w**2 - w2mean).sum() / ngal

mapper.map_values(lon, lat, [wht], None, w)
del lon, lat, w

del page

# set mean weight if there was no column for it
if wcol is None:
wmean = w2mean = 1.0

del page, lon, lat, w
# compute mean visibility
if catalog.visibility is None:
vbar = 1
else:
vbar = np.mean(catalog.visibility)

# compute average weight in nonzero pixels
wbar = wht.mean()
if catalog.visibility is not None:
wbar /= np.mean(catalog.visibility)
# mean weight per effective mapper "pixel"
wbar = ngal / (4 * np.pi * vbar) * wmean * mapper.area

# normalise the map
wht /= wbar

# set metadata of arrays
update_metadata(wht, self, catalog, mapper, wbar=wbar)
# bias from weights
bias = 4 * np.pi * vbar**2 * (w2mean / wmean**2) / ngal

# set metadata of array
update_metadata(wht, self, catalog, mapper, wbar=wbar, bias=bias)

# return the weight map
return wht
Expand Down

0 comments on commit e95686a

Please sign in to comment.