Skip to content

Commit 61a3ccd

Browse files
author
Serena Giardiello
committed
adding codes
1 parent 06957cc commit 61a3ccd

File tree

7 files changed

+664
-10
lines changed

7 files changed

+664
-10
lines changed

project/SO/pISO/python/dust/global_dust.dict

Lines changed: 15 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -10,6 +10,16 @@ kspace_tf_path = release_dir + "pspipe/kspace_tf/dust_binning_50/"
1010
sim_spec_dir = release_dir + "pspipe/sim_spectra/dust_binning_50/"
1111
source_catalog = release_dir + "pspipe/source_catalog/cat_skn_multifreq_20220526_nightonly.txt"
1212

13+
mcm_dir = "/scratch/c.spxsg6/iso/dust/mcms"
14+
alms_dir = "/scratch/c.spxsg6/iso/dust/alms"
15+
spec_dir = "/scratch/c.spxsg6/iso/dust/spectra"
16+
best_fits_dir = "/scratch/c.spxsg6/iso/dust/best_fits"
17+
plots_base_dir = "/scratch/c.spxsg6/iso/dust/plots"
18+
ps_model_dir = "/scratch/c.spxsg6/iso/dust/noise_model"
19+
sq_win_alms_dir = "/scratch/c.spxsg6/iso/dust/sq_win_alms"
20+
cov_dir = "/scratch/c.spxsg6/iso/dust/covariances"
21+
montecarlo_dir = "/scratch/c.spxsg6/iso/dust/montecarlo"
22+
1323
deconvolve_pixwin = True
1424
pixwin_Planck = {"pix": 'HEALPIX', "nside": 2048}
1525

@@ -31,8 +41,8 @@ n_splits_Planck = 2
3141
# get from running pipeline_dust.yml
3242
# then extract the patch given by window_so_i5_f220_baseline.fits
3343
# enmap.extract(existing_planck_maps, window.shape, window.wcs)
34-
maps_Planck_f143 = ["planck_projected/npipe6v20%s_f143_map_srcfree_deep56_patch.fits" % split for split in ["A", "B"]]
35-
maps_Planck_f353 = ["planck_projected/npipe6v20%s_f353_map_srcfree_deep56_patch.fits" % split for split in ["A", "B"]]
44+
maps_Planck_f143 = ["planck_projected/npipe6v20%s_f143_map_deep56_patch_srcfree.fits" % split for split in ["A", "B"]]
45+
maps_Planck_f353 = ["planck_projected/npipe6v20%s_f353_map_deep56_patch_srcfree.fits" % split for split in ["A", "B"]]
3646

3747
cal_Planck_f143 = 1.
3848
cal_Planck_f353 = 1.
@@ -64,8 +74,9 @@ cross_link_threshold = 0.97
6474
n_med_ivar = 3
6575

6676
mask_dir = release_dir + "pspipe/masks/"
67-
ps_mask_Planck_f143 = mask_dir + "source_mask_15mJy_and_dust_rad12.fits"
68-
ps_mask_Planck_f353 = mask_dir + "source_mask_15mJy_and_dust_rad12.fits"
77+
# also these have to be projected over the patch
78+
ps_mask_Planck_f143 = mask_dir + "source_mask_15mJy_and_dust_rad12_deep56.fits"
79+
ps_mask_Planck_f353 = mask_dir + "source_mask_15mJy_and_dust_rad12_deep56.fits"
6980

7081
window_dir = release_dir + "windows/"
7182

project/SO/pISO/python/get_covariance_blocks.py

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -12,12 +12,12 @@
1212

1313
log = log.get_logger(**d)
1414

15-
mcms_dir = "mcms"
16-
spectra_dir = "spectra"
17-
noise_dir = "noise_model"
18-
cov_dir = "covariances"
19-
bestfit_dir = "best_fits"
20-
sq_win_alms_dir = "sq_win_alms"
15+
mcms_dir = d["mcm_dir"]
16+
spectra_dir = d["spec_dir"]
17+
noise_dir = d["ps_model_dir"]
18+
cov_dir = d["cov_dir"]
19+
bestfit_dir = d["best_fits_dir"]
20+
sq_win_alms_dir = d["sq_win_alms_dir"]
2121

2222
pspy_utils.create_directory(cov_dir)
2323
surveys = d["surveys"]
Lines changed: 117 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,117 @@
1+
# TODO: this script needs further testing before production use
2+
3+
import matplotlib
4+
5+
matplotlib.use("Agg")
6+
import sys
7+
8+
import numpy as np
9+
import pylab as plt
10+
import scipy.interpolate
11+
from pspipe_utils import log, misc
12+
from pspy import pspy_utils, so_dict, so_spectra
13+
14+
15+
def interpolate_dict(lb, cb, lth, spectra, force_positive=True, l_inf_lmin_equal_lmin=True, discard_cross=True):
    """Interpolate binned power spectra onto a full multipole range.

    Parameters
    ----------
    lb: 1d array
        bin centers at which the binned spectra are sampled
    cb: dict
        binned spectra, one 1d array per entry of ``spectra``
    lth: 1d array
        target multipoles for the interpolated spectra
    spectra: list of str
        names of the spectra to interpolate (e.g. ["TT", "TE", ...])
    force_positive: bool
        if True, return the absolute value of the interpolated spectra
    l_inf_lmin_equal_lmin: bool
        if True, clamp every multipole below the first bin center to the
        first bin value instead of trusting the extrapolation
    discard_cross: bool
        if True, set every spectrum other than TT/EE/BB to zero

    Returns
    -------
    cl_dict: dict
        interpolated spectra sampled at ``lth``
    """
    cl_dict = {}
    for spec in spectra:
        # linear interpolation; values outside the lb range are extrapolated
        interp = scipy.interpolate.interp1d(lb, cb[spec], fill_value="extrapolate")
        cl_dict[spec] = interp(lth)
        if l_inf_lmin_equal_lmin:
            # extrapolation below the first bin is unreliable: clamp to the
            # first bin value (renamed from "id" to avoid shadowing the builtin)
            low = np.where(lth <= np.min(lb))
            cl_dict[spec][low] = cb[spec][0]
        if force_positive:
            cl_dict[spec] = np.abs(cl_dict[spec])
        if discard_cross and spec not in ["TT", "EE", "BB"]:
            cl_dict[spec] = np.zeros(len(lth))
    return cl_dict
29+
30+
d = so_dict.so_dict()
31+
d.read_from_file(sys.argv[1])
32+
log = log.get_logger(**d)
33+
34+
spectra_dir = d['spec_dir']
35+
ps_model_dir = d['ps_model_dir']
36+
plot_dir = d['plot_base_dir'] + "/noise_model/"
37+
38+
pspy_utils.create_directory(ps_model_dir)
39+
pspy_utils.create_directory(plot_dir)
40+
41+
spectra = ["TT", "TE", "TB", "ET", "BT", "EE", "EB", "BE", "BB"]
42+
surveys = d["surveys"]
43+
lmax = d["lmax"]
44+
type = d["type"]
45+
binning_file = d["binning_file"]
46+
47+
lth = np.arange(2, lmax+2)
48+
49+
for sv in surveys:
50+
arrays = d[f"arrays_{sv}"]
51+
for id_ar1, ar1 in enumerate(arrays):
52+
for id_ar2, ar2 in enumerate(arrays):
53+
if id_ar1 > id_ar2: continue
54+
55+
log.info(f"Computing noise for '{sv}' survey and '{ar1}x{ar2}' arrays")
56+
57+
l, bl_ar1 = misc.read_beams(d[f"beam_T_{sv}_{ar1}"], d[f"beam_pol_{sv}_{ar1}"])
58+
l, bl_ar2 = misc.read_beams(d[f"beam_T_{sv}_{ar2}"], d[f"beam_pol_{sv}_{ar2}"])
59+
60+
lb, nbs_ar1xar1 = so_spectra.read_ps(f"{spectra_dir}/{type}_{sv}_{ar1}x{sv}_{ar1}_noise.dat", spectra=spectra)
61+
lb, nbs_ar1xar2 = so_spectra.read_ps(f"{spectra_dir}/{type}_{sv}_{ar1}x{sv}_{ar2}_noise.dat", spectra=spectra)
62+
lb, nbs_ar2xar2 = so_spectra.read_ps(f"{spectra_dir}/{type}_{sv}_{ar2}x{sv}_{ar2}_noise.dat", spectra=spectra)
63+
64+
bb_ar1, bb_ar2 = {}, {}
65+
for field in ["T", "E", "B"]:
66+
lb, bb_ar1[field] = pspy_utils.naive_binning(l, bl_ar1[field], binning_file, lmax)
67+
lb, bb_ar2[field] = pspy_utils.naive_binning(l, bl_ar2[field], binning_file, lmax)
68+
69+
Rb = {}
70+
for spec in spectra:
71+
X,Y = spec
72+
nbs_ar1xar1[spec] *= bb_ar1[X] * bb_ar1[Y]
73+
nbs_ar1xar2[spec] *= bb_ar1[X] * bb_ar2[Y]
74+
nbs_ar2xar2[spec] *= bb_ar2[X] * bb_ar2[Y]
75+
76+
Rb[spec] = nbs_ar1xar2[spec] / np.sqrt(np.abs(nbs_ar1xar1[spec] * nbs_ar2xar2[spec]))
77+
78+
if ar1 == ar2:
79+
nlth = interpolate_dict(lb, nbs_ar1xar1, lth, spectra)
80+
else:
81+
Rlth = interpolate_dict(lb, Rb, lth, spectra)
82+
nlth_ar1xar1 = interpolate_dict(lb, nbs_ar1xar1, lth, spectra)
83+
nlth_ar2xar2 = interpolate_dict(lb, nbs_ar2xar2, lth, spectra)
84+
nlth = {}
85+
for spec in spectra:
86+
nlth[spec] = Rlth[spec] * np.sqrt(nlth_ar1xar1[spec] * nlth_ar2xar2[spec])
87+
88+
89+
for spec in spectra:
90+
plt.figure(figsize=(12,12))
91+
plt.plot(lth,
92+
nlth[spec],
93+
label="interpolate",
94+
color="lightblue")
95+
if ar1 == ar2:
96+
nbs= nbs_ar1xar1[spec]
97+
else:
98+
nbs= nbs_ar1xar2[spec]
99+
100+
plt.plot(lb,
101+
nbs,
102+
".",
103+
label = f"{sv} {ar1}x{ar2}",
104+
color="red")
105+
plt.legend(fontsize=20)
106+
plt.savefig(f"{plot_dir}/noise_interpolate_{ar1}x{ar2}_{sv}_{spec}.png", bbox_inches="tight")
107+
plt.clf()
108+
plt.close()
109+
110+
spec_name_noise_mean = f"mean_{ar1}x{ar2}_{sv}_noise"
111+
so_spectra.write_ps(ps_model_dir + f"/{spec_name_noise_mean}.dat", lth, nlth, type, spectra=spectra)
112+
113+
if ar2 != ar1:
114+
spec_name_noise_mean = f"mean_{ar2}x{ar1}_{sv}_noise"
115+
TE, ET, TB, BT, EB, BE = nlth["ET"], nlth["TE"], nlth["BT"], nlth["TB"], nlth["BE"], nlth["EB"]
116+
nlth["TE"], nlth["ET"], nlth["TB"], nlth["BT"], nlth["EB"], nlth["BE"] = TE, ET, TB, BT, EB, BE
117+
so_spectra.write_ps(ps_model_dir + f"/{spec_name_noise_mean}.dat", lth, nlth, type, spectra=spectra)
Lines changed: 64 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,64 @@
1+
"""
2+
This script computes the alms of all squared windows; it is a necessary step of the covariance computation.
3+
"""
4+
import sys
5+
6+
import numpy as np
7+
from pspipe_utils import log, pspipe_list
8+
from pspy import pspy_utils, so_dict, so_map, so_mpi, sph_tools
9+
10+
11+
def mult(map_a, map_b, log):
    """Return the pixel-wise product of two windows.

    If the two maps have different pixel sizes, the lower-resolution map is
    reprojected onto the higher-resolution geometry with ``so_map.car2car``
    before multiplying.

    Parameters
    ----------
    map_a, map_b: so_map
        the two window maps to multiply
    log: logger
        used to report which map (if any) is being resampled

    Returns
    -------
    so_map
        the product map, on the finer of the two pixelizations
    """
    res_a = 1 / map_a.data.pixsize()
    res_b = 1 / map_b.data.pixsize()

    if res_a == res_b:
        # same pixelization: multiply in place on a copy
        product = map_a.copy()
        product.data *= map_b.data
        return product

    if res_a < res_b:
        log.info("resample map a")
        product = map_b.copy()
        product.data *= so_map.car2car(map_a, map_b).data
        return product

    log.info("resample map b")
    product = map_a.copy()
    product.data *= so_map.car2car(map_b, map_a).data
    return product
30+
31+
32+
d = so_dict.so_dict()
d.read_from_file(sys.argv[1])
log = log.get_logger(**d)

lmax = d["lmax"]
niter = d["niter"]
sq_win_alms_dir = d["sq_win_alms_dir"]

pspy_utils.create_directory(sq_win_alms_dir)

# one task per spectrum combination (sv1, ar1) x (sv2, ar2)
n_sq_alms, sv1_list, ar1_list, sv2_list, ar2_list = pspipe_list.get_spectra_list(d)

log.info(f"number of sq win alms to compute : {n_sq_alms}")

# distribute the tasks over the available MPI ranks
so_mpi.init(True)
subtasks = so_mpi.taskrange(imin=0, imax=n_sq_alms - 1)
log.info(f"subtasks: {subtasks}")

for task in subtasks:
    task = int(task)
    sv1, ar1, sv2, ar2 = sv1_list[task], ar1_list[task], sv2_list[task], ar2_list[task]

    log.info(f"[{task:02d}] Computing map2alm for {sv1}_{ar1}x{sv2}_{ar2}...")

    win_T1 = so_map.read_map(d["window_T_%s_%s" % (sv1, ar1)])
    win_T2 = so_map.read_map(d["window_T_%s_%s" % (sv2, ar2)])

    # pixel-wise product of the two temperature windows; mult() reprojects
    # one of the maps if their pixelizations differ
    sq_win = mult(win_T1, win_T2, log)
    sqwin_alm = sph_tools.map2alm(sq_win, niter=niter, lmax=lmax)

    np.save(f"{sq_win_alms_dir}/alms_{sv1}_{ar1}x{sv2}_{ar2}.npy", sqwin_alm)
Lines changed: 96 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,96 @@
1+
"""
2+
This script analyze the simulations generated by mc_mnms_get_spectra_from_nlms.py and stored in "sim_spec_dir" specified in the dictionary file.
3+
it estimates the mean and numerical std from the simulations
4+
"""
5+
6+
7+
from pspy import pspy_utils, so_dict, so_spectra
8+
from pspipe_utils import log
9+
import numpy as np
10+
import sys
11+
12+
d = so_dict.so_dict()
13+
d.read_from_file(sys.argv[1])
14+
log = log.get_logger(**d)
15+
16+
type = d["type"]
17+
surveys = d["surveys"]
18+
iStart = d["iStart"]
19+
iStop = d["iStop"]
20+
lmax = d["lmax"]
21+
sim_alm_dtype = d["sim_alm_dtype"]
22+
23+
if sim_alm_dtype == "complex64":
24+
spec_dtype = np.float32
25+
elif sim_alm_dtype == "complex128":
26+
spec_dtype = np.float64
27+
28+
29+
sim_spec_dir = d["sim_spec_dir"]
30+
mcm_dir = d["mcm_dir"]
31+
mc_dir = d["montecarlo_dir"]
32+
cov_dir = d["cov_dir"]
33+
34+
pspy_utils.create_directory(mc_dir)
35+
pspy_utils.create_directory(cov_dir)
36+
37+
spectra = ["TT", "TE", "TB", "ET", "BT", "EE", "EB", "BE", "BB"]
38+
39+
for kind in ["cross", "noise", "auto"]:
40+
41+
vec_list = []
42+
43+
for iii in range(iStart, iStop + 1):
44+
log.info(f"reading sim spectra {kind} {iii}")
45+
46+
vec = []
47+
48+
for spec in spectra:
49+
for id_sv1, sv1 in enumerate(surveys):
50+
arrays_1 = d[f"arrays_{sv1}"]
51+
for id_ar1, ar1 in enumerate(arrays_1):
52+
for id_sv2, sv2 in enumerate(surveys):
53+
arrays_2 = d[f"arrays_{sv2}"]
54+
for id_ar2, ar2 in enumerate(arrays_2):
55+
56+
if (id_sv1 == id_sv2) & (id_ar1 > id_ar2) : continue
57+
if (id_sv1 > id_sv2) : continue
58+
if (sv1 != sv2) & (kind == "noise"): continue
59+
if (sv1 != sv2) & (kind == "auto"): continue
60+
61+
spec_name = f"{type}_{sv1}_{ar1}x{sv2}_{ar2}_{kind}_%05d" % iii
62+
63+
lb, Db = so_spectra.read_ps(sim_spec_dir + f"/{spec_name}.dat", spectra=spectra)
64+
65+
n_bins = len(lb)
66+
vec = np.append(vec, Db[spec])
67+
68+
vec_list += [vec.astype(spec_dtype)]
69+
70+
log.info(f"computing monte carlo mean and std")
71+
72+
mean_vec = np.mean(vec_list, axis=0)
73+
std_vec = np.std(vec_list, axis=0)
74+
75+
log.info(f"saving files on disk")
76+
77+
id_spec = 0
78+
for spec in spectra:
79+
for id_sv1, sv1 in enumerate(surveys):
80+
arrays_1 = d[f"arrays_{sv1}"]
81+
for id_ar1, ar1 in enumerate(arrays_1):
82+
for id_sv2, sv2 in enumerate(surveys):
83+
arrays_2 = d[f"arrays_{sv2}"]
84+
for id_ar2, ar2 in enumerate(arrays_2):
85+
86+
if (id_sv1 == id_sv2) & (id_ar1 > id_ar2) : continue
87+
if (id_sv1 > id_sv2) : continue
88+
if (sv1 != sv2) & (kind == "noise"): continue
89+
if (sv1 != sv2) & (kind == "auto"): continue
90+
91+
mean = mean_vec[id_spec * n_bins:(id_spec + 1) * n_bins]
92+
std = std_vec[id_spec * n_bins:(id_spec + 1) * n_bins]
93+
94+
np.savetxt(f"{mc_dir}/spectra_{spec}_{sv1}_{ar1}x{sv2}_{ar2}_{kind}.dat", np.array([lb, mean, std]).T)
95+
96+
id_spec += 1

0 commit comments

Comments
 (0)