Commit 9bed05e4 authored by LUSTIG Peter

Processing flux list when executed. Added minimum distance between random sources.
parent 25ec5816
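The two changes named above can be seen further down in the diff: the simulation now iterates over the full flux list (for _flux in flux:) and the position generator is given a minimum separation, 'dist_threshold': min_source_dist, set to twice the beam FWHM in pixels. As an illustration only, the sketch below shows one way such a separation constraint can be enforced by rejection sampling; it is not taken from nikamap.utils.pos_uniform (whose implementation is not part of this diff), and every name in it is invented for the example.

import numpy as np

def draw_separated_positions(nsources, shape, dist_threshold, rng=None):
    """Draw uniform pixel positions, rejecting any candidate that falls
    closer than dist_threshold (in pixels) to an already accepted one."""
    rng = np.random.default_rng() if rng is None else rng
    accepted = []
    while len(accepted) < nsources:
        candidate = rng.uniform((0.0, 0.0), shape)   # (y, x) inside the map
        if all(np.hypot(*(candidate - prev)) >= dist_threshold
               for prev in accepted):
            accepted.append(candidate)
    return np.array(accepted)

# e.g. 5 sources on a 300x300 pixel map, at least 8.8 pixels apart
positions = draw_separated_positions(5, (300, 300), 8.8)

In the committed script the constraint is simply forwarded to the catalogue generator through the 'dist_threshold' keyword passed to fake_worker together with 'cat_gen': pos_uniform.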
@@ -19,8 +19,10 @@ from scipy.optimize import curve_fit
from nikamap import NikaMap, Jackknife
from nikamap.utils import pos_uniform
from astropy.io import fits
from time import clock
import argparse
import sys
import datetime
'''
%load_ext autoreload
%autoreload 2
@@ -76,9 +78,10 @@ plt.close('all')
# DATA_DIR_SERVER = "/data/NIKA/Reduced/G2_COMMON_MODE_ONE_BLOCK/v_1/"
DATA_DIR_SERVER = "/data/NIKA/Reduced/HLS091828_common_mode_one_block/v_1"
DATA_DIR_MYPC = "/home/peter/Dokumente/Uni/Paris/Stage/data/v_1"
DATA_DIR_SERVER = Path("/data/NIKA/Reduced/"
                       "HLS091828_common_mode_one_block/v_1")
DATA_DIR_MYPC = Path("/home/peter/Dokumente/Uni/Paris/Stage/data/v_1")
'''
parser = argparse.ArgumentParser()
parser.add_argument("-r", "--raw",
action="store",
......@@ -101,11 +104,11 @@ parser.add_argument("-s", "--sources", action="store",
dest="nsources", type=int, default=64,
help="Number of Sources Injected per Simulation")
'''
parser.add_argument("-m", "--method", action="store",
dest="method", default="unknown",
help="Name of the Reduction Method for Outfile Header")
'''
# parser.add_argument("-m", "--method", action="store",
# dest="method", default="unknown",
# help="Name of the Reduction Method for Outfile Header")
parser.add_argument("-o", "--outfile", action="store",
dest="outfile", default=None,
help=("Output File Name. If not specified auto-generated"
@@ -131,7 +134,7 @@ if raw == "mypc":
elif raw == "server":
    DATA_DIR = DATA_DIR_SERVER
else:
    DATA_DIR = raw
    DATA_DIR = Path(raw)
if outfile is None:
    outfile = ('flux{}mJy_thresh{}_nsim{}.fits'
@@ -141,6 +144,8 @@ if outfile is None:
    outfile = outdir + outfile
'''
'''
IMAGE = NikaMap.read(('/home/peter/Dokumente/Uni/Paris/'
                      'Stage/N2CLS/ForMarseille/'
@@ -149,81 +154,121 @@ IMAGE.plot()
plt.show(block=True)
print(IMAGE)
'''
flux = np.geomspace(1, 10, 3) * u.mJy
nsim = 2
min_detection_threshold = 5
nsources = 5
outdir = Path("montecarlo_results/")
outdir = Path("testdir")
ncores = 2
timeprefix = '{date:%y%m%d_%H%M%S}_'.format(date=datetime.datetime.now())
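# Editor's note (illustration, not part of the commit): np.geomspace(1, 10, 3)
# gives three logarithmically spaced fluxes, 1.0, ~3.16 and 10.0 mJy, and
# timeprefix stamps every output file with the run's start time, e.g.
# '{date:%y%m%d_%H%M%S}_'.format(date=datetime.datetime(2019, 6, 7, 15, 30, 45))
# evaluates to '190607_153045_', so files from repeated runs get distinct names.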
if not outdir.exists():
    print('creating directory {}'.format(outdir))
    outdir.mkdir()
# print(flux)
if DATA_DIR_SERVER.exists():
    DATA_DIR = DATA_DIR_SERVER
elif DATA_DIR_MYPC.exists():
    DATA_DIR = DATA_DIR_MYPC
else:
    sys.exit("Raw data path not found. Exit.")
jk_filenames = list(Path(DATA_DIR).glob('*/map.fits'))
for i in range(len(jk_filenames)):
    jk_filenames[i] = str(jk_filenames[i])
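# Editor's note (equivalent alternative, not part of the commit): the loop
# above only converts Path objects to strings and could be written as
# jk_filenames = [str(fn) for fn in Path(DATA_DIR).glob('*/map.fits')]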
message = '''\
###############################################################
# Running Monte Carlo Tests #
# #
# Number of Simulations: {:5d} #
# No Simulations per flux: {:5d} #
# Number of CPUs used: {:5d} #
# Minimum Detection Threshold:{:5.1f} #
# Flux of Injected Sources: {:5.1f} #
# No Different Fluxes: {:5d} #
# Number of Injected Sources: {:5d} #
###############################################################
'''.format(nsim, ncores, min_detection_threshold, flux, nsources)
print(message)
jk_filenames = list(Path(DATA_DIR).glob('*/map.fits'))
for i in range(len(jk_filenames)):
    jk_filenames[i] = str(jk_filenames[i])
'''.format(nsim, ncores, min_detection_threshold, len(flux), nsources)
FAKE_SOURCES = []
DETECTED_SOURCES = []
print(message, "\n")
print("Creating Jackkife Object")
t0 = clock()
jk_iter = Jackknife(jk_filenames, n=nsim)
jk_iter_list = [jk_iter] * nsim
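# Editor's note (not part of the commit): [jk_iter] * nsim builds a list with
# nsim references to the same Jackknife object, which is what gives Pool.map
# one work item per simulation further down.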
# print(type(jk_iter()))
p = Pool(ncores)
nm = jk_iter()
min_source_dist = 2 * nm.beam.fwhm_pix.value
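# Editor's sketch (not part of the commit): min_source_dist is twice the beam
# FWHM expressed in pixels.  With hypothetical values for the beam FWHM and
# the pixel scale (both astropy quantities, chosen here for illustration only),
# the same number could be computed directly:
#     _fwhm = 17.6 * u.arcsec          # assumed beam FWHM
#     _pixscale = 4.0 * u.arcsec       # assumed map pixel size
#     _fwhm_pix = (_fwhm / _pixscale).decompose().value
#     _min_source_dist = 2 * _fwhm_pix     # ~8.8 pixels with these numbers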
helpfunc = partial(fake_worker, **{'min_threshold': min_detection_threshold,
                                   'nsources': nsources, 'flux': flux,
                                   'within': (0, 1), 'cat_gen': pos_uniform,
                                   'parity_threshold': 0.5})
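# Editor's sketch (not part of the commit): functools.partial pre-binds the
# keyword arguments of fake_worker so that Pool.map only has to supply the
# single positional argument (one Jackknife iterator entry per simulation).
# The same pattern with a toy worker:
#     from functools import partial
#     from multiprocessing import Pool
#     def toy_worker(seed, nsources=1, flux=None):
#         return seed, nsources, flux          # stands in for fake_worker
#     bound = partial(toy_worker, nsources=5, flux=1.0)
#     with Pool(2) as pool:
#         print(pool.map(bound, range(4)))     # toy_worker(0), toy_worker(1), ...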
# sources, fake_sources = helpfunc(jk_iter_list)
print('Done in {:.2f}s'.format(clock()-t0))
print('Begin Monte Carlo')
if(1):
    res = p.map(helpfunc, jk_iter_list)
    res = list(zip(*res))
    jk_iter_list = [jk_iter] * nsim
    DETECTED_SOURCES, FAKE_SOURCES = res[:]
else:
    DETECTED_SOURCES = []
    FAKE_SOURCES = []
    with ProgressBar(nsim) as bar:
        for iloop in range(nsim):
            tmpsource, tmpfakesource = helpfunc(jk_iter)
            DETECTED_SOURCES.append(tmpsource)
            FAKE_SOURCES.append(tmpfakesource)
            bar.update()
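# Editor's note (not part of the commit): each worker returns a
# (detected, fake) pair, so p.map() yields a list of pairs and
# list(zip(*res)) transposes it into one tuple of all detected catalogs
# and one tuple of all fake catalogs.  Tiny illustration with plain tuples:
# >>> pairs = [('det0', 'fake0'), ('det1', 'fake1')]
# >>> list(zip(*pairs))
# [('det0', 'det1'), ('fake0', 'fake1')]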
# To merge all the fake_sources and sources catalogs
fake_sources = Table()
sources = Table()
for _fake, _detected in zip(FAKE_SOURCES[:], DETECTED_SOURCES[:]):
    n_fake = len(fake_sources)
    n_detected = len(sources)
    if _detected is not None:
        _detected['ID'] = _detected['ID'] + n_detected
        _detected['fake_sources'] = _detected['fake_sources'] + n_fake
        sources = vstack([sources, _detected])
    _fake['ID'] = _fake['ID'] + n_fake
    _fake['find_peak'] = _fake['find_peak'] + n_detected
    fake_sources = vstack([fake_sources, _fake])
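# Editor's note (not part of the commit): the offsets above keep the
# cross-references valid after stacking.  'fake_sources' in the detected
# catalog indexes rows of the fake catalog and 'find_peak' in the fake
# catalog indexes rows of the detected catalog, so each per-simulation chunk
# is shifted by the number of rows already merged before vstack() appends it.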
phdu = fits.PrimaryHDU()
phdu.header['influx'] = '{}'.format(flux)
phdu.header['nsim'] = nsim
phdu.header['sourcespersim'] = nsources
phdu.header['dthresh'] = min_detection_threshold
hdul = [phdu]
if len(sources) > 0:
    hdul.append(fits.BinTableHDU(data=sources, name='Detected_Sources'))
    hdul.append(fits.BinTableHDU(data=fake_sources, name='Fake_Sources'))
hdul = fits.HDUList(hdul)
hdul.writeto(outfile, overwrite=True)
for _flux in flux:
    p = Pool(ncores)
    helpfunc = partial(fake_worker, **{'min_threshold':
                                       min_detection_threshold,
                                       'nsources': nsources, 'flux': _flux,
                                       'within': (0, 1),
                                       'cat_gen': pos_uniform,
                                       'dist_threshold': min_source_dist,
                                       'parity_threshold': 0.5})
    print('Simulation with {:.2f}...'.format(_flux))
    if(1):
        res = p.map(helpfunc, jk_iter_list)
        res = list(zip(*res))
        DETECTED_SOURCES, FAKE_SOURCES = res[:]
    else:
        DETECTED_SOURCES = []
        FAKE_SOURCES = []
        with ProgressBar(nsim) as bar:
            for iloop in range(nsim):
                tmpsource, tmpfakesource = helpfunc(jk_iter)
                DETECTED_SOURCES.append(tmpsource)
                FAKE_SOURCES.append(tmpfakesource)
                bar.update()
    # To merge all the fake_sources and sources catalogs
    fake_sources = Table()
    sources = Table()
    for _fake, _detected in zip(FAKE_SOURCES[:], DETECTED_SOURCES[:]):
        n_fake = len(fake_sources)
        n_detected = len(sources)
        if _detected is not None:
            _detected['ID'] = _detected['ID'] + n_detected
            _detected['fake_sources'] = _detected['fake_sources'] + n_fake
            sources = vstack([sources, _detected])
        _fake['ID'] = _fake['ID'] + n_fake
        _fake['find_peak'] = _fake['find_peak'] + n_detected
        fake_sources = vstack([fake_sources, _fake])
    fname = ('flux{:.2f}mJy_thresh{}_nsim{}.fits'
             .format(_flux.to_value(unit=u.mJy),
                     min_detection_threshold, nsim))
    fname = timeprefix + fname
    outfile = outdir / fname
    phdu = fits.PrimaryHDU()
    phdu.header['influx'] = '{}'.format(flux)
    phdu.header['nsim'] = nsim
    phdu.header['sourcespersim'] = nsources
    phdu.header['dthresh'] = min_detection_threshold
    hdul = [phdu]
    if len(sources) > 0:
        hdul.append(fits.BinTableHDU(data=sources, name='Detected_Sources'))
        hdul.append(fits.BinTableHDU(data=fake_sources, name='Fake_Sources'))
    hdul = fits.HDUList(hdul)
    hdul.writeto(outfile, overwrite=False)
    print('results written to {}'.format(outfile))
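# Editor's sketch (not part of the commit): reading back the last file written
# above to check its contents.  The two table HDUs exist only when at least one
# source was detected (see the len(sources) > 0 guard).
from astropy.io import fits          # already imported at the top of the script
from astropy.table import Table

header = fits.getheader(outfile, 0)
print(header['influx'], header['nsim'], header['dthresh'])
detected = Table.read(outfile, hdu='Detected_Sources')
fake = Table.read(outfile, hdu='Fake_Sources')
print(len(detected), 'detections,', len(fake), 'injected sources in total')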