Commit abca5cb8 authored by Peter Lustig

Preparation for Parallelisation.

parent b17420e8
......@@ -33,6 +33,13 @@ if '__IPYTHON__' in globals():
plt.ion()
def Wait():
    """Block until the user confirms by typing 'c' at the prompt."""
    while input("[c]ontinue? ") != "c":
        pass
# DATA_DIR = "/data/NIKA/Reduced/G2_kidpar_20171025s41_v2_LP_md_recal_calUranus/v_1/"
DATA_DIR = "/home/peter/Dokumente/Uni/Paris/Stage/N2CLS/ForMarseille/HLS091828_common_mode_one_block/"
......@@ -77,7 +84,6 @@ def completness_purity_wcs(shape, wcs, bins=30,
flux_range=(0, 1), flux_bins=10, flux_log=False,
threshold_range=(0, 1), threshold_bins=10, threshold_log=False):
"""Build a wcs for the completness_purity function
"""
slice_step = np.ceil(np.asarray(shape) / bins).astype(int)
......@@ -96,7 +102,8 @@ def completness_purity_wcs(shape, wcs, bins=30,
def fake_worker(img, min_threshold=2, nsources=8**2, flux=1*u.Jy, within=(1 / 4, 3 / 4), pos_gen=pos_uniform, **kwargs):
"""The completness purity worker, create a fake dataset from an jackknifed image and return catalogs
"""The completness purity worker, create a fake dataset from an jackknifed image, detect sources
in doped image and return catalogs
Parameters
----------
......@@ -106,6 +113,13 @@ def fake_worker(img, min_threshold=2, nsources=8**2, flux=1*u.Jy, within=(1 / 4,
minimum threshold for the detection
**kwargs :
arguments for source injection
Returns
-------
mf_img.sources :
detected sources table
mf_img.fake_sources :
created fake sources table
"""
# Renormalize the stddev
std = img.check_SNR()
......@@ -150,6 +164,13 @@ def completness_worker(shape, wcs, sources, fake_sources, min_threshold=2, max_t
# covering ALL possible SNR, otherwise loose flux, or cap the thresholds...
fake_snr = np.ma.array(sources[fake_sources['find_peak']]['SNR'],
mask=fake_sources['find_peak'].mask)
#print('-----------------------')
#print(fake_sources['find_peak'].mask)
#print('-----------------------')
#print(fake_sources['find_peak'])
#print('-----------------------')
#print(fake_snr)
#print('-----------------------')
# As we are interested by the cumulative numbers, keep all inside the upper pixel
fake_snr[fake_snr > max_threshold] = max_threshold
......@@ -157,6 +178,14 @@ def completness_worker(shape, wcs, sources, fake_sources, min_threshold=2, max_t
# TODO: Consider keeping all pixels information in fake_source and source...
# This would imply to do only a simple wcs_threshold here...
xx, yy, zz = wcs.wcs_world2pix(fake_sources['ra'], fake_sources['dec'], fake_snr.filled(min_threshold), 0)
##
## Why are the ?non-detected fake sources? now filled in with the lowest threshold?
## I really need to find out where this masked column comes from and what it means
#print('-----------------------')
#print(xx, yy, zz)
#print('-----------------------')
#print(shape)
#print('-----------------------')
# Number of fake sources recovered
_completness, _ = np.histogramdd(np.asarray([xx, yy, zz]).T + 0.5, bins=np.asarray(shape),
......@@ -256,16 +285,59 @@ def completness_purity(flux, wcs=None, shape=None, nsources=8**2, within=(1 / 4,
return completness, purity
def completness_purity_2(flux, wcs=None, shape=None, nsources=8**2, within=(1 / 4, 3 / 4), nsim=5, jk_map=None):
    """Accumulate completness and purity histograms for a given flux.

    Runs ``nsim`` fake-source simulations on the jackknifed map and sums the
    per-simulation completness/purity histograms together with their
    normalizations.

    Parameters
    ----------
    flux : :class:`astropy.units.Quantity`
        flux of the fake sources injected in each simulation
    wcs : :class:`astropy.wcs.WCS`
        wcs of the 3D (x, y, threshold) cube
    shape : tuple of int
        shape of the 3D (x, y, threshold) cube
    nsources : int
        number of fake sources injected per simulation
    within : tuple of float
        relative bounding box used for the source injection
    nsim : int
        number of simulations to accumulate
    jk_map :
        jackknifed map used as the noise realization  # NOTE(review): presumably a NikaMap -- confirm

    Returns
    -------
    completness, norm_comp, purity, norm_pur : :class:`numpy.ndarray`
        raw (unnormalized) sums; divide ``completness / norm_comp[..., None]``
        and ``purity / norm_pur`` once several runs have been combined.
    """
    print(flux)

    # Lower and upper threshold edges ... take the center of the pixel for the upper edge
    min_threshold, max_threshold = wcs.sub([3]).all_pix2world([-0.5, shape[2] - 1], 0)[0]

    # `np.float` was deprecated and removed in NumPy >= 1.24 -- use the builtin `float`
    completness = np.zeros(shape, dtype=float)
    norm_comp = np.zeros(shape[0:2], dtype=float)
    purity = np.zeros(shape, dtype=float)
    norm_pur = np.zeros(shape, dtype=float)

    for isim in ProgressBar(nsim):
        # TODO: Change the find_peaks routine, or maybe just the fit_2d_gaussian to be FAST !
        # (Maybe look into gcntrd.pro routine or photutils.centroid.centroid_1dg maybe ?)
        sources, fake_sources = fake_worker(jk_map, nsources=nsources, within=within, flux=flux,
                                            pos_gen=pos_uniform, min_threshold=min_threshold,
                                            dist_threshold=jk_map.beam.fwhm_pix.value)

        # Save global array (MB requests)
        FAKE_SOURCES.append(fake_sources)
        DETECTED_SOURCES.append(sources)

        _completness, _norm_comp = completness_worker(shape, wcs, sources, fake_sources,
                                                      min_threshold, max_threshold)
        completness += _completness
        norm_comp += _norm_comp

        _purity, _norm_pur = purity_worker(shape, wcs, sources, max_threshold)
        purity += _purity
        norm_pur += _norm_pur

    # Normalization is deliberately left to the caller: norm can be 0, and the
    # raw sums allow combining the results of several runs before dividing.
    return completness, norm_comp, purity, norm_pur
plt.close('all')
# Collect every jackknife map from the sibling directories of DATA_DIR
filenames = list(Path(DATA_DIR).glob('../*/map.fits'))
#filenames = DATA_DIR+'map.fits'
#print(type(filenames))
data = NikaMap.read(Path(DATA_DIR) / 'map.fits')
print(data)
# NOTE(review): `data` is immediately re-bound to the jackknife map below,
# shadowing the plain map read above -- confirm the first read is still needed
data = NikaMap.read(Path(DATA_DIR) / 'map_JK.fits')
#
# _data = next(Jackknife(filenames, n=None))
# # TODO: Should in principle be the same, but is not... check....
......@@ -292,6 +364,7 @@ shape_4D, wcs_4D = completness_purity_wcs(data.shape, data.wcs, bins=20,
threshold_range=threshold_range, threshold_bins=threshold_bins)
print(wcs_4D)
# Testing the lower edges
wcs_threshold = wcs_4D.sub([3])
# Sanity check: the threshold axis pixel edges must reproduce threshold_edges
assert np.all(np.abs(wcs_threshold.all_pix2world(np.arange(threshold_bins+1)-0.5, 0) - threshold_edges) < 1e-15)
......@@ -306,16 +379,42 @@ DETECTED_SOURCES = []
# flux, nsources, within, wcs, shape, nsim, jk_filenames = 10*u.mJy, 8**2, (0, 1), wcs_4D.sub([1, 2, 3]), shape_4D[0:3], np.multiply(*shape_4D[0:2]) * 100, filenames
# This is a single run check for a single flux
completness, purity = completness_purity(flux=10*u.mJy, nsources=8**2, within=(0, 1),
wcs=wcs_4D.sub([1, 2, 3]),
shape=shape_4D[0:3], nsim=np.multiply(*shape_4D[0:2]) * 100,
jk_filenames=filenames)
nsim = 2
pool = Pool(cpu_count())
# BUG FIX: the original used `**{'nsources'=8**2, ...}` -- `=` inside a dict
# literal is a SyntaxError; pass the frozen arguments as plain keywords instead.
p_completness_purity_2 = partial(completness_purity_2,
                                 nsources=8**2, within=(0, 1),
                                 wcs=wcs_4D.sub([1, 2, 3]),
                                 shape=shape_4D[0:3], nsim=nsim,
                                 jk_map=data)
#completness, purity = completness_purity_2(flux=10*u.mJy)
# TODO:
# Prepare function for multiprocessing
# calculate sum of results of each run and divide sums by norms
#
#
#completness, purity = completness_purity_2(flux=10*u.mJy, nsources=8**2, within=(0, 1),
# wcs=wcs_4D.sub([1, 2, 3]),
# shape=shape_4D[0:3], nsim=nsim,
# jk_map=data)
#shape=shape_4D[0:3], nsim=np.multiply(*shape_4D[0:2]) * 100,
# Visual check: completness (top row) and purity (bottom row), one panel per threshold bin
fig, axes = plt.subplots(nrows=2, ncols=threshold_bins, sharex=True, sharey=True)
for ibin in range(threshold_bins):
    axes[0, ibin].imshow(completness[:, :, ibin], vmin=0, vmax=1)
    axes[1, ibin].imshow(purity[:, :, ibin], vmin=0, vmax=1)
    axes[1, ibin].set_xlabel("thresh={:.2f}".format(threshold[ibin]))
axes[0, 0].set_title("{} simulations".format(nsim))
axes[0, 0].set_ylabel("completness")
axes[1, 0].set_ylabel("purity")
plt.show()
Wait()
'''
# To merge all the fake_sources and sources catalogs
fake_sources = FAKE_SOURCES[0]
sources = DETECTED_SOURCES[0]
......@@ -330,14 +429,15 @@ for _fake, _detected in zip(FAKE_SOURCES[1:], DETECTED_SOURCES[1:]):
fake_sources = vstack([fake_sources, _fake])
sources = vstack([sources, _detected])
# The full Monte Carlo checks
FAKE_SOURCES = []
DETECTED_SOURCES = []
pool = Pool(cpu_count())
p_completness_parity = partial(completness_purity, **{'wcs': wcs.sub([1, 2, 3]), 'shape': shape[0:3], 'jk_filenames': filenames})
#p_completness_parity = partial(completness_purity, **{'wcs': wcs.sub([1, 2, 3]), 'shape': shape[0:3], 'jk_filenames': filenames})
p_completness_parity = partial(completness_purity_2, **{'wcs': wcs_4D.sub([1, 2, 3]), 'shape': shape_4D[0:3], 'jk_map': data})
result = pool.map(p_completness_parity, fluxes)
completness = np.moveaxis(np.asarray([_completness for _completness, _purity in result]), 0, 3)
purity = np.moveaxis(np.asarray([_purity for _completness, _purity in result]), 0, 3)
'''
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment