Commit 383067fd authored by LUSTIG Peter

Added an option to detect without source injection.

parent 7d7df7df
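
In short, fake_worker now accepts flux=None: when no flux is given it skips add_gaussian_sources and goes straight to match filtering and detection, so a run with flux = [0] measures spurious detections in the pure jackknife maps. A minimal sketch of that guard, assuming img is a nikamap.NikaMap as in the worker below (process_map is an illustrative name, not the script's function):

    import astropy.units as u

    # Sketch (not the actual fake_worker) of the guard this commit introduces:
    # with flux=None nothing is injected and only the match-filtered detection
    # runs, so the result counts spurious detections in the jackknife map.
    def process_map(img, flux=1 * u.mJy, **kwargs):
        if flux is not None:
            # injection mode: add fake Gaussian sources with the given peak flux
            img.add_gaussian_sources(peak_flux=flux, **kwargs)
        # both modes match-filter the map before source detection
        return img.match_filter(img.beam)

In the driver further down, flux = [0] is the sentinel that selects the no-injection branch, which builds the fake_worker partial with flux=None.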
@@ -57,8 +57,9 @@ def fake_worker(jkiter, min_threshold=2, nsources=8**2, flux=1*u.Jy,
# Actually rather slow... maybe check the code ?
# print(flux)
img.add_gaussian_sources(nsources=nsources, within=within, peak_flux=flux,
cat_gen=cat_gen, **kwargs)
if flux is not None:
img.add_gaussian_sources(nsources=nsources, within=within,
peak_flux=flux, cat_gen=cat_gen, **kwargs)
# ... match filter it ...
mf_img = img.match_filter(img.beam)
@@ -76,88 +77,14 @@ def fake_worker(jkiter, min_threshold=2, nsources=8**2, flux=1*u.Jy,
plt.close('all')
# DATA_DIR_SERVER = "/data/NIKA/Reduced/G2_COMMON_MODE_ONE_BLOCK/v_1/"
DATA_DIR_SERVER = Path("/data/NIKA/Reduced/"
"HLS091828_common_mode_one_block/v_1")
DATA_DIR_MYPC = Path("/home/peter/Dokumente/Uni/Paris/Stage/data/v_1")
'''
parser = argparse.ArgumentParser()
parser.add_argument("-r", "--raw",
action="store",
dest="rawdirectory", default="mypc",
help="Path of Raw File Directory or Key.")
parser.add_argument("-t", "--threshold",
action="store",
dest="threshold", type=float, default=3.,
help="SNR Detection Threshold")
parser.add_argument("-n", "--nsim", action="store",
dest="nsim", type=int, default=4,
help="Number of Simulations")
parser.add_argument("-c", "--cores", action="store",
dest="cores", type=int, default=1,
help="Number of Cores")
parser.add_argument("-f", "--flux", action="store",
dest="flux", type=float, default=10.,
help="Flux of the Created Sources in mJy")
parser.add_argument("-s", "--sources", action="store",
dest="nsources", type=int, default=64,
help="Number of Sources Injected per Simulation")
# parser.add_argument("-m", "--method", action="store",
# dest="method", default="unknown",
# help="Name of the Reduction Method for Outfile Header")
parser.add_argument("-o", "--outfile", action="store",
dest="outfile", default=None,
help=("Output File Name. If not specified auto-generated"
"from parameters"))
parser.add_argument("-d", "--outdir", action="store",
dest="outdir", default="montecarlo_results/",
help="Output directory. Must exist.")
args = parser.parse_args()
raw = args.rawdirectory
nsim = args.nsim
ncores = args.cores
flux = args.flux * u.mJy
# method = args.method
outfile = args.outfile
outdir = args.outdir
nsources = args.nsources
min_detection_threshold = args.threshold
if raw == "mypc":
DATA_DIR = DATA_DIR_MYPC
elif raw == "server":
DATA_DIR = DATA_DIR_SERVER
else:
DATA_DIR = Path(raw)
if outfile is None:
outfile = ('flux{}mJy_thresh{}_nsim{}.fits'
.format(flux.to_value(unit=u.mJy),
min_detection_threshold,
nsim))
outfile = outdir + outfile
'''
'''
IMAGE = NikaMap.read(('/home/peter/Dokumente/Uni/Paris/'
'Stage/N2CLS/ForMarseille/'
'HLS091828_common_mode_kids_out/map_JK.fits'))
IMAGE.plot()
plt.show(block=True)
print(IMAGE)
'''
flux = np.geomspace(1, 10, 3) * u.mJy
flux = [0]
nsim = 2
min_detection_threshold = 5
min_detection_threshold = 3
nsources = 5
outdir = Path("montecarlo_results/")
outdir = Path("testdir")
@@ -170,7 +97,7 @@ if not outdir.exists():
outdir.mkdir()
# print(flux)
''' load maps for jackknifing'''
if DATA_DIR_SERVER.exists():
DATA_DIR = DATA_DIR_SERVER
elif DATA_DIR_MYPC.exists():
@@ -203,72 +130,124 @@ jk_iter = Jackknife(jk_filenames, n=nsim)
nm = jk_iter()
min_source_dist = 2 * nm.beam.fwhm_pix.value
print('Done in {:.2f}s'.format(clock()-t0))
print('Begin Monte Carlo')
jk_iter_list = [jk_iter] * nsim
p = Pool(ncores)
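# helpfunc (built just below) fixes fake_worker's keyword arguments; the pool
# then maps it over nsim references to the jackknife iterator, one per simulation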
for _flux in flux:
if flux != [0]:
for _flux in flux:
helpfunc = partial(fake_worker, **{'min_threshold':
min_detection_threshold,
'nsources': nsources, 'flux': _flux,
'within': (0, 1),
'cat_gen': pos_uniform,
'dist_threshold': min_source_dist,
'parity_threshold': 0.5})
print('Simulation with {:.2f}...'.format(_flux))
if True:
res = p.map(helpfunc, jk_iter_list)
res = list(zip(*res))
DETECTED_SOURCES, FAKE_SOURCES = res[:]
else:
DETECTED_SOURCES = []
FAKE_SOURCES = []
with ProgressBar(nsim) as bar:
for iloop in range(nsim):
tmpsource, tmpfakesource = helpfunc(jk_iter)
DETECTED_SOURCES.append(tmpsource)
FAKE_SOURCES.append(tmpfakesource)
bar.update()
# To merge all the fake_sources and sources catalogs
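# (IDs and the cross-reference columns are offset by the current catalogue
#  lengths so the links between injected and detected sources stay valid
#  after the per-simulation tables are stacked)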
fake_sources = Table()
sources = Table()
for _fake, _detected in zip(FAKE_SOURCES[:], DETECTED_SOURCES[:]):
n_fake = len(fake_sources)
n_detected = len(sources)
if _detected is not None:
_detected['ID'] = _detected['ID'] + n_detected
_detected['fake_sources'] = _detected['fake_sources'] + n_fake
sources = vstack([sources, _detected])
_fake['ID'] = _fake['ID'] + n_fake
_fake['find_peak'] = _fake['find_peak'] + n_detected
fake_sources = vstack([fake_sources, _fake])
fname = ('flux{:.2f}mJy_thresh{}_nsim{}.fits'
.format(_flux.to_value(unit=u.mJy),
min_detection_threshold, nsim))
fname = timeprefix + fname
outfile = outdir / fname
phdu = fits.PrimaryHDU()
phdu.header['influx'] = '{}'.format(_flux)
phdu.header['nsim'] = nsim
phdu.header['sourcespersim'] = nsources
phdu.header['dthresh'] = min_detection_threshold
hdul = [phdu]
if len(sources) > 0:
hdul.append(fits.BinTableHDU(data=sources,
name='Detected_Sources'))
hdul.append(fits.BinTableHDU(data=fake_sources, name='Fake_Sources'))
hdul = fits.HDUList(hdul)
hdul.writeto(outfile, overwrite=False)
print('results written to {}'.format(outfile))
if flux == [0]:
helpfunc = partial(fake_worker, **{'min_threshold':
min_detection_threshold,
'nsources': nsources, 'flux': _flux,
'flux': None,
'within': (0, 1),
'cat_gen': pos_uniform,
'dist_threshold': min_source_dist,
'parity_threshold': 0.5})
print('Simulation with {:.2f}...'.format(_flux))
print('Simulation without sources...')
if True:
res = p.map(helpfunc, jk_iter_list)
res = list(zip(*res))
DETECTED_SOURCES, FAKE_SOURCES = res[:]
DETECTED_SOURCES = res[0]
else:
DETECTED_SOURCES = []
FAKE_SOURCES = []
with ProgressBar(nsim) as bar:
for iloop in range(nsim):
tmpsource, tmpfakesource = helpfunc(jk_iter)
tmpsource, _ = helpfunc(jk_iter)
DETECTED_SOURCES.append(tmpsource)
FAKE_SOURCES.append(tmpfakesource)
bar.update()
# To merge all the fake_sources and sources catalogs
fake_sources = Table()
sources = Table()
for _fake, _detected in zip(FAKE_SOURCES[:], DETECTED_SOURCES[:]):
n_fake = len(fake_sources)
for _detected in DETECTED_SOURCES:
n_detected = len(sources)
if _detected is not None:
_detected['ID'] = _detected['ID'] + n_detected
_detected['fake_sources'] = _detected['fake_sources'] + n_fake
sources = vstack([sources, _detected])
_fake['ID'] = _fake['ID'] + n_fake
_fake['find_peak'] = _fake['find_peak'] + n_detected
fake_sources = vstack([fake_sources, _fake])
fname = ('flux{:.2f}mJy_thresh{}_nsim{}.fits'
.format(_flux.to_value(unit=u.mJy),
min_detection_threshold, nsim))
fname = ('nosources_thresh{}_nsim{}.fits'
.format(min_detection_threshold, nsim))
fname = timeprefix + fname
outfile = outdir / fname
phdu = fits.PrimaryHDU()
phdu.header['influx'] = '{}'.format(_flux)
phdu.header['influx'] = 0
phdu.header['nsim'] = nsim
phdu.header['sourcespersim'] = nsources
phdu.header['sourcespersim'] = 0
phdu.header['dthresh'] = min_detection_threshold
hdul = [phdu]
if len(sources) > 0:
hdul.append(fits.BinTableHDU(data=sources, name='Detected_Sources'))
hdul.append(fits.BinTableHDU(data=fake_sources, name='Fake_Sources'))
hdul.append(fits.BinTableHDU(data=sources,
name='Detected_Sources'))
hdul = fits.HDUList(hdul)
hdul.writeto(outfile, overwrite=False)
print('results written to {}'.format(outfile))
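
For reference, a minimal sketch of reading one of the result files back with astropy; the file name is illustrative, the actual one carries the timeprefix generated earlier in the script:

    from astropy.io import fits
    from astropy.table import Table

    # open one of the files written above, e.g. from the no-injection branch
    with fits.open('testdir/nosources_thresh3_nsim2.fits') as hdul:
        hdr = hdul[0].header
        print('threshold:', hdr['dthresh'], 'nsim:', hdr['nsim'])
        # the table extensions hold the stacked catalogues; the detected-sources
        # table is only present when at least one source was found
        for hdu in hdul[1:]:
            print(hdu.name, len(Table(hdu.data)), 'rows')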