ENH(fields): keep track of effective number of points
Add a new metadata field `neff` to maps generated by `ScalarField` and
`ComplexField` which tracks the effective number of points that made it
into the map.

Closes: #54
ntessore committed Jan 11, 2024
1 parent 36b4a64 commit 0d4c052
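
The quantity being tracked is the standard effective sample size for weighted data, neff = (sum of weights)^2 / (sum of squared weights), which equals the raw object count when all weights are equal and shrinks as the weights become less uniform. A minimal sketch of the definition (plain NumPy, hypothetical weights):

    import numpy as np

    w = np.array([1.0, 1.0, 0.5, 2.0])  # hypothetical per-object weights
    neff = w.sum() ** 2 / (w**2).sum()  # effective number of points
    print(neff)  # 3.24 here; equals len(w) == 4 only for uniform weights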
Showing 3 changed files with 58 additions and 9 deletions.
heracles/fields.py: 40 changes (33 additions & 7 deletions)
@@ -292,7 +292,7 @@ async def __call__(
bias = ngal / (4 * np.pi) * mapper.area**2 / nbar**2

# set metadata of array
- update_metadata(pos, self, catalog, mapper, nbar=nbar, bias=bias)
+ update_metadata(pos, self, catalog, mapper, nbar=nbar, bias=bias, fsky=vbar)

# return the position map
return pos
@@ -320,7 +320,7 @@ async def __call__(

# total weighted variance from online algorithm
ngal = 0
- wmean, var = 0.0, 0.0
+ wmean, w2mean, var = 0.0, 0.0, 0.0

# go through pages in catalogue and map values
async for page in _pages(catalog, progress):
@@ -338,6 +338,7 @@ async def __call__(
var += (v**2 - var).sum() / ngal
else:
wmean += (w - wmean).sum() / ngal
+ w2mean += (w**2 - w2mean).sum() / ngal
var += ((w * v) ** 2 - var).sum() / ngal

del lon, lat, v, w
@@ -347,7 +348,7 @@

# fix mean weight if there was no column for it
if wcol is None:
- wmean = 1.0
+ wmean = w2mean = 1.0

# compute mean visibility
if catalog.visibility is None:
@@ -364,8 +365,20 @@
# compute bias from variance (per object)
bias = 4 * np.pi * vbar**2 * (var / wmean**2) / ngal

+ # compute the effective number of points
+ neff = wmean**2 / w2mean * ngal
+
# set metadata of array
- update_metadata(val, self, catalog, mapper, wbar=wbar, bias=bias)
+ update_metadata(
+     val,
+     self,
+     catalog,
+     mapper,
+     wbar=wbar,
+     bias=bias,
+     fsky=vbar,
+     neff=neff,
+ )

# return the value map
return val
@@ -398,7 +411,7 @@ async def __call__(

# total weighted variance from online algorithm
ngal = 0
- wmean, var = 0.0, 0.0
+ wmean, w2mean, var = 0.0, 0.0, 0.0

# go through pages in catalogue and get the shear values,
async for page in _pages(catalog, progress):
@@ -417,6 +430,7 @@ async def __call__(
var += (re**2 + im**2 - var).sum() / ngal
else:
wmean += (w - wmean).sum() / ngal
+ w2mean += (w**2 - w2mean).sum() / ngal
var += ((w * re) ** 2 + (w * im) ** 2 - var).sum() / ngal

del lon, lat, re, im, w
@@ -425,7 +439,7 @@

# set mean weight if there was no column for it
if wcol is None:
- wmean = 1.0
+ wmean = w2mean = 1.0

# compute mean visibility
if catalog.visibility is None:
@@ -442,8 +456,20 @@
# bias from measured variance, for E/B decomposition
bias = 2 * np.pi * vbar**2 * (var / wmean**2) / ngal

+ # compute the effective number of points
+ neff = wmean**2 / w2mean * ngal
+
# set metadata of array
- update_metadata(val, self, catalog, mapper, wbar=wbar, bias=bias)
+ update_metadata(
+     val,
+     self,
+     catalog,
+     mapper,
+     wbar=wbar,
+     bias=bias,
+     fsky=vbar,
+     neff=neff,
+ )

# return the shear map
return val
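As a cross-check of the accounting above (a standalone sketch, not repository code; NumPy only, with synthetic weights and an arbitrary page split): accumulating `wmean` and `w2mean` page by page with the running-mean updates shown in the diff reproduces the same `neff` as the direct single-pass definition.

    import numpy as np

    rng = np.random.default_rng(42)
    weights = rng.uniform(0.5, 1.5, size=1000)  # synthetic catalogue weights

    # online accumulation, mirroring the per-page updates above
    ngal = 0
    wmean, w2mean = 0.0, 0.0
    for w in np.array_split(weights, 10):  # ten "pages" of the catalogue
        ngal += w.size
        wmean += (w - wmean).sum() / ngal
        w2mean += (w**2 - w2mean).sum() / ngal

    neff_online = wmean**2 / w2mean * ngal

    # direct single-pass definition
    neff_direct = weights.sum() ** 2 / (weights**2).sum()

    np.testing.assert_allclose(neff_online, neff_direct)
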
heracles/io.py: 14 changes (13 additions & 1 deletion)
@@ -40,15 +40,27 @@
"spin": "spin weight of map",
"kernel": "mapping kernel of map",
"nside": "NSIDE parameter of HEALPix map",
"nbar": "mean number density",
"wbar": "mean weight density",
"fsky": "sky fraction",
"neff": "effective number of points",
"catalog_1": "catalog of first map",
"spin_1": "spin weight of first map",
"kernel_1": "mapping kernel of first map",
"nside_1": "NSIDE parameter of first HEALPix map",
"nbar_1": "mean number density of first field",
"wbar_1": "mean weight density of first field",
"fsky_1": "sky fraction of first field",
"neff_1": "effective number of points of first field",
"catalog_2": "catalog of second map",
"spin_2": "spin weight of second map",
"kernel_2": "mapping kernel of second map",
"nside_2": "NSIDE parameter of second HEALPix map",
"noisbias": "noise bias of spectrum",
"nbar_2": "mean number density of second field",
"wbar_2": "mean weight density of second field",
"fsky_2": "sky fraction of second field",
"neff_2": "effective number of points of second field",
"bias": "additive bias of spectrum",
}


tests/test_fields.py: 13 changes (12 additions & 1 deletion)
@@ -190,6 +190,7 @@ def test_positions(mapper, catalog, vmap):
"kernel": "healpix",
"nside": mapper.nside,
"bias": pytest.approx(bias / nbar**2),
"fsky": 1.0,
}
np.testing.assert_array_equal(m, 0)

@@ -206,13 +207,15 @@ def test_positions(mapper, catalog, vmap):
"kernel": "healpix",
"nside": mapper.nside,
"bias": pytest.approx(bias / nbar**2),
"fsky": 1.0,
}
np.testing.assert_array_equal(m, 1.0)

# compute overdensity maps with visibility map

catalog.visibility = vmap
- nbar /= vmap.mean()
+ vbar = vmap.mean()
+ nbar /= vbar

f = Positions("ra", "dec")
m = coroutines.run(f(catalog, mapper))
@@ -225,6 +228,7 @@ def test_positions(mapper, catalog, vmap):
"kernel": "healpix",
"nside": mapper.nside,
"bias": pytest.approx(bias / nbar**2),
"fsky": vbar,
}

# compute number count map with visibility map
@@ -240,6 +244,7 @@ def test_positions(mapper, catalog, vmap):
"kernel": "healpix",
"nside": mapper.nside,
"bias": pytest.approx(bias / nbar**2),
"fsky": vbar,
}

# compute overdensity maps with given (incorrect) nbar
@@ -263,6 +268,7 @@ def test_scalar_field(mapper, catalog):
w = next(iter(catalog))["w"]
v = next(iter(catalog))["g1"]
v2 = ((w * v) ** 2).sum()
+ neff = w.sum() ** 2 / (w**2).sum()
w = w.reshape(w.size // 4, 4).sum(axis=-1)
wbar = w.mean()
bias = (4 * np.pi / npix / npix) * v2
@@ -275,6 +281,8 @@
"kernel": "healpix",
"nside": mapper.nside,
"bias": pytest.approx(bias / wbar**2),
"fsky": 1.0,
"neff": pytest.approx(neff),
}
np.testing.assert_array_almost_equal(m, 0)

@@ -291,6 +299,7 @@ def test_complex_field(mapper, catalog):
re = next(iter(catalog))["g1"]
im = next(iter(catalog))["g2"]
v2 = ((w * re) ** 2 + (w * im) ** 2).sum()
+ neff = w.sum() ** 2 / (w**2).sum()
w = w.reshape(w.size // 4, 4).sum(axis=-1)
wbar = w.mean()
bias = (4 * np.pi / npix / npix) * v2 / 2
@@ -303,6 +312,8 @@
"kernel": "healpix",
"nside": mapper.nside,
"bias": pytest.approx(bias / wbar**2),
"fsky": 1.0,
"neff": pytest.approx(neff),
}
np.testing.assert_array_almost_equal(m, 0)
