completness.py 14.5 KB
Newer Older
Peter Lustig's avatar
Peter Lustig committed
1 2 3 4 5 6 7
from __future__ import absolute_import, division, print_function

from pathlib import Path
import os
import numpy as np
import matplotlib.pyplot as plt

8
#from multiprocess import Pool, cpu_count
Peter Lustig's avatar
Peter Lustig committed
9 10 11 12 13 14
from functools import partial

from astropy import units as u
from astropy.io import ascii
from astropy.wcs import WCS
from astropy.utils.console import ProgressBar
15
from astropy.table import Table, vstack
Peter Lustig's avatar
Peter Lustig committed
16 17 18 19 20 21 22 23 24

from scipy.optimize import curve_fit

from nikamap import NikaMap, Jackknife
from nikamap.utils import pos_uniform

from IPython import get_ipython
ipython = get_ipython()

Peter Lustig's avatar
Peter Lustig committed
25 26
import myfunctions as mfct
from time import clock
Peter Lustig's avatar
Peter Lustig committed
27
from astropy.io import fits
LUSTIG Peter's avatar
LUSTIG Peter committed
28
from mpl_toolkits.axes_grid1 import make_axes_locatable
Peter Lustig's avatar
Peter Lustig committed
29

Peter Lustig's avatar
Peter Lustig committed
30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86
if '__IPYTHON__' in globals():
    # When running under IPython, enable auto-reloading of edited modules
    # and the interactive Tk matplotlib backend.
    for _magic in ('load_ext autoreload', 'autoreload 2', 'matplotlib tk'):
        ipython.magic(_magic)

def add_axis(name, range, bins, unit=None, i_axe=3, log=False):
    """Build a header dictionary describing one additional WCS axis.

    Parameters
    ----------
    name : str
        the CTYPE name of the axis ('-LOG' is appended when ``log=True``)
    range : tuple of float
        the (lower, upper) edges covered by the axis
    bins : int
        the number of bins along the axis
    unit : str, optional
        the CUNIT value of the axis
    i_axe : int
        the WCS axis index to use in the keyword names
    log : bool
        when True, use logarithmic bin spacing instead of linear

    Returns
    -------
    dict
        CTYPE/CRPIX/CUNIT/CRVAL/CDELT keywords for axis ``i_axe``
    """
    ctype_key = 'CTYPE{}'.format(i_axe)
    crval_key = 'CRVAL{}'.format(i_axe)
    cdelt_key = 'CDELT{}'.format(i_axe)

    header = {ctype_key: name,
              'CRPIX{}'.format(i_axe): 1,
              'CUNIT{}'.format(i_axe): unit}

    if log:
        # Logarithmic spacing, defined on the bin edges: CRVAL is the
        # (geometric) center of the first bin, CDELT its local linear step.
        log_step = (np.log(range[1]) - np.log(range[0])) / bins
        first_center = np.exp(np.log(range[0]) + log_step / 2)
        header[ctype_key] = name + '-LOG'
        header[crval_key] = first_center
        header[cdelt_key] = log_step * first_center
    else:
        # Linear spacing, defined on the bin edges: CRVAL is the center of
        # the first bin.
        step = (range[1] - range[0]) / bins
        header[crval_key] = range[0] + step / 2
        header[cdelt_key] = step

    return header


def completness_purity_wcs(shape, wcs, bins=30,
                           flux_range=(0, 1), flux_bins=10, flux_log=True,
                           threshold_range=(0, 1), threshold_bins=10, threshold_log=False):
    """Build the 4D shape and WCS for the completness/purity computation.

    Parameters
    ----------
    shape : tuple
        the celestial shape of the input map
    wcs : :class:`astropy.wcs.WCS`
        the celestial wcs of the input map
    bins : int
        the number of celestial bins of the output map
    flux_range, flux_bins, flux_log :
        range, number of bins and log/linear flag of the flux axis (axis 4)
    threshold_range, threshold_bins, threshold_log :
        range, number of bins and log/linear flag of the SNR threshold axis (axis 3)

    Returns
    -------
    tuple, :class:`astropy.wcs.WCS`
        the 4D shape (bins, bins, threshold_bins, flux_bins) and corresponding wcs
    """
    # Downsample the celestial wcs onto a bins x bins grid
    slice_step = np.ceil(np.asarray(shape) / bins).astype(int)
    celestial_slice = slice(0, shape[0], slice_step[0]), slice(0, shape[1], slice_step[1])

    # [WIP]: Shall we use a 4D WCS ? (ra/dec flux/threshold)
    # [WIP]: -TAB does not seems to be very easy to do with astropy
    header = wcs[celestial_slice[0], celestial_slice[1]].to_header()
    header['WCSAXES'] = 4

    # BUGFIX: flux_log/threshold_log were accepted but ignored (the flux axis
    # was hard-coded to log, the threshold axis to linear).  They are honored
    # now; flux_log defaults to True so callers relying on the previous
    # behavior are unaffected.
    header.update(add_axis('THRESHOLD', threshold_range, threshold_bins, i_axe=3, log=threshold_log))
    header.update(add_axis('FLUX', flux_range, flux_bins, i_axe=4, log=flux_log))

    return (bins, bins, threshold_bins, flux_bins), WCS(header)


87
def fake_worker(img, min_threshold=2, nsources=8**2, flux=1*u.Jy, within=(1 / 4, 3 / 4), pos_gen=pos_uniform, photmethod='peak',**kwargs):
    """The completness purity worker: create a fake dataset from a jackknifed
    image, detect sources in the doped image and return the catalogs.

    Parameters
    ----------
    img : :class:`nikamap.NikaMap`
        Jackknifed dataset (modified in place: sources are injected into it)
    min_threshold : float
        minimum SNR threshold for the detection
    nsources : int
        number of fake sources to inject
    flux : :class:`astropy.units.Quantity`
        peak flux of the injected sources
    within : tuple of float
        relative bounding box for the injected positions
    pos_gen : callable
        position generator for the injected sources
    photmethod : str
        photometry method passed to :func:`FluxResolution` ('peak' or other)
    **kwargs :
        additional arguments for source injection

    Returns
    -------
    mf_img.sources :
        detected sources table
    mf_img.fake_sources :
        injected fake sources table
    """
    # Renormalize the stddev so that the SNR map has unit scatter
    std = img.check_SNR()
    img.uncertainty.array *= std

    # Inject the fake sources.  Actually rather slow... maybe check the code ?
    img.add_gaussian_sources(nsources=nsources, within=within, peak_flux=flux, pos_gen=pos_gen, **kwargs)

    # ... match filter it (renormalizing the matched-filtered SNR as well) ...
    mf_img = img.match_filter(img.beam)
    mf_img.uncertainty.array *= mf_img.check_SNR()

    # ... and detect sources with the lowest threshold...
    # The gaussian fit from subpixel=True is very slow here...
    mf_img.detect_sources(threshold=min_threshold)

    # Photometry of the detected sources with the requested method
    FluxResolution(mf_img, method=photmethod)

    # NOTE(review): this second call re-does peak photometry regardless of
    # `photmethod`, which is redundant when photmethod == 'peak' — confirm
    # whether both flux columns are actually needed downstream.
    mf_img.phot_sources(peak=True, psf=False)

    #flux = FluxResolution(mf_img)

    return mf_img.sources, mf_img.fake_sources

131 132 133 134 135 136 137 138 139 140 141
def FluxResolution(nm, method='peak'):
    """Run photometry on the detected sources of *nm*.

    Parameters
    ----------
    nm : :class:`nikamap.NikaMap`
        the map whose ``sources`` table will be measured (in place)
    method : str
        'peak' for peak photometry, anything else for psf photometry

    Returns
    -------
    int
        always 0
    """
    use_peak = method == 'peak'
    nm.phot_sources(sources=nm.sources, peak=use_peak, psf=not use_peak)

    #np.savetxt('fluxhisttest.txt', nm.sources['flux_'+method])
    return 0

Peter Lustig's avatar
Peter Lustig committed
142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167

def completness_worker(shape, wcs, sources, fake_sources, min_threshold=2, max_threshold=5):
    """Compute completness from the fake source catalog

    Parameters
    ----------
    shape : tuple
        the shape of the resulting image
    wcs : :class:`astropy.wcs.WCS`
        the 3D (ra/dec/threshold) wcs used to bin the sources
    sources : :class:`astropy.table.Table`
        the detected sources
    fake_sources : :class:`astropy.table.Table`
        the fake sources table, with corresponding mask
    min_threshold : float
        the minimum SNR threshold requested
    max_threshold : float
        the maximum SNR threshold requested

    Returns
    -------
    _completness, _norm_comp
        corresponding 2D :class:`numpy.ndarray`
    """
    # If one wanted to used a histogramdd, one would need a threshold axis
    # covering ALL possible SNR, otherwise loose flux, or cap the thresholds...
    # SNR of each fake source that was recovered; entries where 'find_peak'
    # is masked (i.e. the fake source was NOT detected) stay masked.
    fake_snr = np.ma.array(sources[fake_sources['find_peak']]['SNR'],
                           mask=fake_sources['find_peak'].mask)

    # As we are interested by the cumulative numbers, keep all inside the upper pixel
    fake_snr[fake_snr > max_threshold] = max_threshold

    # TODO: Consider keeping all pixels information in fake_source and source...
    #       This would imply to do only a simple wcs_threshold here...
    # Undetected sources are filled with min_threshold so they land in the
    # lowest threshold pixel (their weight below is 0 anyway).
    xx, yy, zz = wcs.wcs_world2pix(fake_sources['ra'], fake_sources['dec'], fake_snr.filled(min_threshold), 0)

    # Number of fake sources recovered, binned in (x, y, threshold);
    # the +0.5 shifts pixel centers onto bin edges
    _completness, _ = np.histogramdd(np.asarray([xx, yy, zz]).T + 0.5, bins=np.asarray(shape),
                                     range=list(zip([0]*len(shape), shape)),
                                     weights=~fake_sources['find_peak'].mask)
    # Reverse cumulative sum to get all sources at the given threshold
    _completness = np.cumsum(_completness[..., ::-1], axis=2)[..., ::-1]

    # Number of fake sources (independant of threshold)
    _norm_comp, _, _ = np.histogram2d(xx + 0.5, yy + 0.5, bins=np.asarray(shape[0:2]),
                                      range=list(zip([0]*2, shape[0:2])))
    return _completness, _norm_comp


def purity_worker(shape, wcs, sources, max_threshold=2):
    """Compute purity from the detected source catalog

    Parameters
    ----------
    shape : tuple
        the shape of the resulting image
    wcs : :class:`astropy.wcs.WCS`
        the 3D (ra/dec/threshold) wcs used to bin the sources
    sources : :class:`astropy.table.Table`
        the detected sources table, with corresponding match
    max_threshold : float
        the maximum threshold requested

    Returns
    -------
    _purity, _norm_pur
        corresponding 3D :class:`numpy.ndarray`
    """
    # BUGFIX: sources['SNR'] is a view into the caller's table, so capping
    # through it silently modified the input in place -- work on a copy.
    sources_snr = np.array(sources['SNR'], dtype=float, copy=True)

    # As we are interested by the cumulative numbers, keep all inside the upper pixel
    sources_snr[sources_snr > max_threshold] = max_threshold
    xx, yy, zz = wcs.wcs_world2pix(sources['ra'], sources['dec'], sources_snr, 0)

    # Number of detected sources matched to a fake source, binned in
    # (x, y, threshold); the +0.5 shifts pixel centers onto bin edges
    _purity, _ = np.histogramdd(np.asarray([xx, yy, zz]).T + 0.5, bins=np.asarray(shape),
                                range=list(zip([0]*len(shape), shape)),
                                weights=~sources['fake_sources'].mask)

    # Reverse cumulative sum to get all matches at the given threshold
    _purity = np.cumsum(_purity[..., ::-1], axis=2)[..., ::-1]

    # Number of total detected sources at a given threshold
    _norm_pur, _ = np.histogramdd(np.asarray([xx, yy, zz]).T + 0.5, bins=np.asarray(shape),
                                  range=list(zip([0]*len(shape), shape)))
    _norm_pur = np.cumsum(_norm_pur[..., ::-1], axis=2)[..., ::-1]

    return _purity, _norm_pur


229 230 231 232 233 234 235 236 237 238 239 240 241 242 243 244 245 246 247 248 249 250 251 252 253 254 255 256 257 258 259 260 261 262 263 264 265 266 267 268 269 270 271 272 273 274 275
#def completness_purity(flux, wcs=None, shape=None, nsources=8**2, within=(1 / 4, 3 / 4), nsim=500, jk_filenames=None):
#    """Compute completness map for a given flux"""
#
#    print(flux)
#    #fluxlist=[]
#
#    # wcs_celestial = wcs.celestial
#    # Lower and upper edges ... take the center of the pixel for the upper edge
#    min_threshold, max_threshold = wcs.sub([3]).all_pix2world([-0.5, shape[2]-1], 0)[0]
#
#    completness = np.zeros(shape, dtype=np.float)
#    norm_comp = np.zeros(shape[0:2], dtype=np.float)
#
#    purity = np.zeros(shape, dtype=np.float)
#    norm_pur = np.zeros(shape, dtype=np.float)
#
#    jk_iter = Jackknife(jk_filenames, n=nsim)
#    print("Begin loop")
#
#    for img in ProgressBar(jk_iter):
#        print("In loop")
#        # %load_ext snakeviz
#        # %snakeviz the following line.... all is spend in the find_peaks / fit_2d_gaussian
#        # TODO: Change the find_peaks routine, or maybe just the fit_2d_gaussian to be FAST ! (Maybe look into gcntrd.pro routine or photutils.centroid.centroid_1dg maybe ?)
#        sources, fake_sources = fake_worker(img, nsources=nsources, within=within, flux=flux, pos_gen=pos_uniform, min_threshold=min_threshold, dist_threshold=img.beam.fwhm_pix.value)
#
#        # Save global array (MB requests)
#        #FAKE_SOURCES.append(fake_sources)
#        #DETECTED_SOURCES.append(sources)
#
#        _completness, _norm_comp = completness_worker(shape, wcs, sources, fake_sources, min_threshold, max_threshold)
#
#        completness += _completness
#        norm_comp += _norm_comp
#
#        _purity, _norm_pur = purity_worker(shape, wcs, sources, max_threshold)
#
#        purity += _purity
#        norm_pur += _norm_pur
#
#    # norm can be 0, so to avoid warning on invalid values...
#    with np.errstate(divide='ignore', invalid='ignore'):
#        completness /= norm_comp[..., np.newaxis]
#        purity /= norm_pur
#
#    # TODO: One should probably return completness AND norm if one want to combine several fluxes
#    return completness, purity
Peter Lustig's avatar
Peter Lustig committed
276 277


Peter Lustig's avatar
Peter Lustig committed
278
def completness_purity_2(flux, nsim=2, wcs=None, shape=None, nsources=8**2, within=(1 / 4, 3 / 4), jk_map=None):
    """Compute un-normalized completness and purity cubes for a given flux.

    Parameters
    ----------
    flux : :class:`astropy.units.Quantity`
        peak flux of the injected fake sources
    nsim : int
        number of simulations to accumulate
    wcs : :class:`astropy.wcs.WCS`
        the 3D (ra/dec/threshold) wcs of the output cubes
    shape : tuple
        the 3D shape of the output cubes
    nsources : int
        number of fake sources injected per simulation
    within : tuple of float
        relative bounding box for the injected positions
    jk_map : :class:`nikamap.NikaMap`
        the jackknifed map used as noise realization

    Returns
    -------
    completness, norm_comp, purity, norm_pur, sourcetable
        the summed completness/purity cubes, their normalizations, and the
        stacked table of all detected sources over the nsim runs
    """
    # Lower and upper edges of the threshold axis ... take the center of the
    # pixel for the upper edge
    min_threshold, max_threshold = wcs.sub([3]).all_pix2world([-0.5, shape[2]-1], 0)[0]

    # BUGFIX: np.float was deprecated and removed in recent numpy releases;
    # the builtin float is strictly equivalent here.
    completness = np.zeros(shape, dtype=float)
    norm_comp = np.zeros(shape[0:2], dtype=float)

    purity = np.zeros(shape, dtype=float)
    norm_pur = np.zeros(shape, dtype=float)

    sourcetable = Table()

    for _ in ProgressBar(nsim):
        # TODO: Change the find_peaks routine, or maybe just the
        # fit_2d_gaussian to be FAST ! (Maybe look into gcntrd.pro routine or
        # photutils.centroid.centroid_1dg maybe ?)
        sources, fake_sources = fake_worker(jk_map, nsources=nsources, within=within, flux=flux, pos_gen=pos_uniform, min_threshold=min_threshold, dist_threshold=jk_map.beam.fwhm_pix.value)

        sourcetable = vstack([sourcetable, sources])

        _completness, _norm_comp = completness_worker(shape, wcs, sources, fake_sources, min_threshold, max_threshold)

        completness += _completness
        norm_comp += _norm_comp

        _purity, _norm_pur = purity_worker(shape, wcs, sources, max_threshold)

        purity += _purity
        norm_pur += _norm_pur

    # Normalization (completness /= norm_comp, purity /= norm_pur) is left to
    # the caller so that several runs / fluxes can be combined first.
    return completness, norm_comp, purity, norm_pur, sourcetable
327 328


Peter Lustig's avatar
Peter Lustig committed
329

330
def Plot_CompPur(completness, purity, threshold, nsim=None, savename=None):
    """Plot completness (top row) and purity (bottom row) per threshold bin.

    Parameters
    ----------
    completness, purity : :class:`numpy.ndarray`
        3D cubes whose last axis is the SNR threshold
    threshold : array_like
        the threshold value of each bin, used for the x labels
    nsim : int, optional
        number of simulations, displayed in the figure title when given
    savename : str, optional
        when given, save the figure to this path
    """
    threshold_bins = completness.shape[-1]
    # BUGFIX: squeeze=False keeps `axes` two-dimensional even when
    # threshold_bins == 1, so the axes[row, col] indexing below cannot fail.
    fig, axes = plt.subplots(nrows=2, ncols=threshold_bins, sharex=True, sharey=True, squeeze=False)

    im = None
    for i in range(threshold_bins):
        axes[0, i].imshow(completness[:, :, i], vmin=0, vmax=1)
        im = axes[1, i].imshow(purity[:, :, i], vmin=0, vmax=1)
        axes[1, i].set_xlabel("thresh={:.2f}".format(threshold[i]))

        if i == (threshold_bins - 1):
            # Attach the shared colorbar to the last purity panel
            divider = make_axes_locatable(axes[1, i])
            cax = divider.append_axes('right', size='5%', pad=0.0)
            fig.colorbar(im, cax=cax, orientation='vertical')

    if nsim is not None:
        axes[0, 0].set_title("{} simulations".format(nsim))
    axes[0, 0].set_ylabel("completness")
    axes[1, 0].set_ylabel("purity")

    if savename is not None:
        plt.savefig(savename)

    #plt.show(block=True)
Peter Lustig's avatar
Peter Lustig committed
350 351


Peter Lustig's avatar
Peter Lustig committed
352

Peter Lustig's avatar
Peter Lustig committed
353 354 355 356 357 358
        
    # Save result !!
    
    


359 360 361 362 363 364 365 366 367 368 369 370 371 372 373 374

#completness, purity = completness_purity_2(flux=10*u.mJy)


# TODO:
# Prepare function for multiprocessing
# calculate sum of results of each run and divide sums by norms
# 
# 

#completness, purity = completness_purity_2(flux=10*u.mJy, nsources=8**2, within=(0, 1),
#                                         wcs=wcs_4D.sub([1, 2, 3]),
#                                         shape=shape_4D[0:3], nsim=nsim,
#                                         jk_map=data)

#shape=shape_4D[0:3], nsim=np.multiply(*shape_4D[0:2]) * 100,
Peter Lustig's avatar
Peter Lustig committed
375

376 377

'''
Peter Lustig's avatar
Peter Lustig committed
378 379 380 381 382 383 384 385 386 387 388 389 390 391 392 393 394 395 396
# To merge all the fake_sources and sources catalogs
fake_sources = FAKE_SOURCES[0]
sources = DETECTED_SOURCES[0]
for _fake, _detected in zip(FAKE_SOURCES[1:], DETECTED_SOURCES[1:]):
    n_fake = len(fake_sources)
    n_detected = len(sources)
    _fake['ID'] = _fake['ID'] + n_fake
    _detected['ID'] = _detected['ID'] + n_detected
    _fake['find_peak'] = _fake['find_peak'] + n_detected
    _detected['fake_sources'] = _detected['fake_sources'] + n_fake

    fake_sources = vstack([fake_sources, _fake])
    sources = vstack([sources, _detected])

# The full Monte Carlo checks
FAKE_SOURCES = []
DETECTED_SOURCES = []

pool = Pool(cpu_count())
397 398
#p_completness_parity = partial(completness_purity, **{'wcs': wcs.sub([1, 2, 3]), 'shape': shape[0:3], 'jk_filenames': filenames})
p_completness_parity = partial(completness_purity_2, **{'wcs': wcs_4D.sub([1, 2, 3]), 'shape': shape_4D[0:3], 'jk_map': data})
Peter Lustig's avatar
Peter Lustig committed
399 400 401 402
result = pool.map(p_completness_parity, fluxes)

completness = np.moveaxis(np.asarray([_completness for _completness, _purity in result]), 0, 3)
purity = np.moveaxis(np.asarray([_purity for _completness, _purity in result]), 0, 3)
403
'''