-
Notifications
You must be signed in to change notification settings - Fork 7
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Merge pull request #361 from LSSTDESC/mock_calibration
Add mock catalog type requiring no calibration, and add example pipeline
- Loading branch information
Showing
11 changed files
with
439 additions
and
8 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,70 @@ | ||
"""Generate a mock NFW-lensed background-galaxy catalog as an ASCII table.

Galaxies are scattered uniformly over a square field centred on a cluster,
given Gaussian-distributed redshifts, cut to those lying behind the cluster,
and assigned NFW shears plus simple mock SNR and size (T) columns.  The
result is written in astropy "ascii.commented_header" format.
"""
import argparse

import galsim
import numpy as np
from astropy.table import Table

parser = argparse.ArgumentParser()
parser.add_argument("--output", type=str, default="simple_cat.txt")
parser.add_argument("--nmax", type=int, default=10000, help="Number of objects to generate, though cat size is reduced by redshift cut")
parser.add_argument("--mass", type=float, default=10.0e14, help="Cluster mass in Msun")
parser.add_argument("--cluster_z", type=float, default=0.22, help="Cluster redshift")
parser.add_argument("--concentration", type=float, default=4.0, help="Cluster concentration parameter")
parser.add_argument("--size", type=float, default=240.0, help="Size of the region in arcmin")
parser.add_argument("--mean-z", type=float, default=0.5, help="Mean redshift of the background galaxies")
parser.add_argument("--sigma-z", type=float, default=0.1, help="Redshift std dev of the background galaxies")
parser.add_argument("--mean-snr", type=float, default=20., help="Mean SNR")
parser.add_argument("--mean-size", type=float, default=0.3, help="Galaxy mean size^2 T parameter in arcsec")
parser.add_argument("--sigma-size", type=float, default=0.3, help="Galaxy std dev size^2 T parameter in arcsec")
args = parser.parse_args()

# Lensing model for the foreground cluster halo
nfw = galsim.NFWHalo(args.mass, args.concentration, args.cluster_z)

# Square field of side args.size, centred on the origin
half_size = args.size / 2
xmin, xmax = -half_size, half_size
ymin, ymax = -half_size, half_size

# Bug fix: this was hard-coded to 10000, silently ignoring --nmax
N = args.nmax

x = np.random.uniform(xmin, xmax, N)
y = np.random.uniform(ymin, ymax, N)
z = np.random.normal(args.mean_z, args.sigma_z, N)

# Keep only galaxies behind the cluster; objects in front are not lensed
w = np.where(z > nfw.z)
x1 = x[w]
y1 = y[w]
z1 = z[w]
n = z1.size

# Convert field offsets to degrees.
# NOTE(review): dividing by 3600 treats x1/y1 as arcsec, but --size is
# documented in arcmin — confirm the intended units with the caller.
ra = np.zeros(n) + x1 / 3600
dec = np.zeros(n) + y1 / 3600
# Unreduced shear from the NFW halo at each galaxy position and redshift
g1, g2 = nfw.getShear([x1, y1], z1, reduced=False)
s2n = np.random.exponential(args.mean_snr, size=n)
print(s2n.mean())

# This should give plenty of selected objects since we cut on T/Tpsf > 0.5 by default
# and Tpsf default is ~.2
T = np.random.normal(args.mean_size, args.sigma_size, size=n).clip(0.01, np.inf)

data = {
    "ra": ra,
    "dec": dec,
    "g1": g1,
    "g2": g2,
    "s2n": s2n,
    "T": T,
    "redshift": z1,
}

table = Table(data=data)
table.write(args.output, overwrite=True, format="ascii.commented_header")
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,24 @@ | ||
"""Write a mock catalog containing a single galaxy cluster to HDF5.

Produces "mock_single_cluster_catalog.hdf5" with a "clusters" group holding
one entry (position, redshift, richness, and their errors) and an
``n_clusters`` attribute, in the layout read by the cluster pipeline stages.
"""
import h5py
import numpy as np

# One cluster at the origin of the mock field
ids = np.array([0])
ra = np.array([0.0])
dec = np.array([0.0])
z = np.array([0.22])
z_err = np.array([0.01])
richness = np.array([10.0])
richness_err = np.array([1.0])
scale = np.array([1.0])

with h5py.File("mock_single_cluster_catalog.hdf5", "w") as f:
    # Removed a stray no-op `g` expression statement that was dead code
    g = f.create_group("clusters")
    g.create_dataset("cluster_id", data=ids)
    g.create_dataset("ra", data=ra)
    g.create_dataset("dec", data=dec)
    g.create_dataset("redshift", data=z)
    g.create_dataset("redshift_err", data=z_err)
    g.create_dataset("richness", data=richness)
    g.create_dataset("richness_err", data=richness_err)
    g.create_dataset("scaleval", data=scale)

    g.attrs["n_clusters"] = 1
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,26 @@ | ||
# Global options applied to every stage
global:
    chunk_rows: 100000        # rows processed per I/O chunk
    pixelization: healpix
    nside: 64
    sparse: true

# Source selection using true redshifts directly (no photo-z input)
TXSourceSelectorSimple:
    input_pz: false
    true_z: true
    bands: riz
    T_cut: 0.5                # size cut: T/Tpsf threshold
    s2n_cut: 10.0             # signal-to-noise cut
    max_rows: 1000
    delta_gamma: 0.02
    source_zbin_edges: [0.5, 0.7, 0.9, 1.1, 2.0]
    shear_prefix: ''
    verbose: true

# Mock photo-z PDFs: narrow Gaussians centred on the true redshift
TXMockTruthPZ:
    mock_sigma_z: 0.001

# Per-cluster background shear catalog extraction
CLClusterShearCatalogs:
    redshift_cut_criterion: zmode
    redshift_weight_criterion: zmode
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,54 @@ | ||
# Stages to run
stages:
    - name: TXSimpleMock             # Convert a text file mock catalog to HDF5
    - name: TXSourceSelectorSimple   # select and split objects into source bins
    - name: TXShearCalibration       # Calibrate and split the source sample tomographically
    - name: TXMockTruthPZ            # Generate PDFs as narrow gaussian centered on the true redshifts
      aliases:
          photoz_pdfs: source_photoz_pdfs
    - name: CLClusterShearCatalogs   # Extract and weight the shear catalog around every cluster

# modules and packages to import that have pipeline
# stages defined in them
modules: >
    txpipe

# where to find any modules that are not in this repo,
# and any other code we need.
python_paths:
    - submodules/WLMassMap/python/desc/

# Where to put outputs
output_dir: data/example/outputs_mock_shear

# How to run the pipeline: mini, parsl, or cwl
launcher:
    name: mini
    interval: 1.0

# Where to run the pipeline: cori-interactive, cori-batch, or local
site:
    name: local
    max_threads: 2

# configuration settings for the individual stages
config: examples/mock_shear/config.yml

# These are overall inputs to the whole pipeline, not generated within it
inputs:
    mock_shear_catalog: data/example/inputs/mock_nfw_shear_catalog.txt
    calibration_table: data/example/inputs/sample_cosmodc2_w10year_errors.dat
    cluster_catalog: data/example/inputs/mock_single_cluster_catalog.hdf5
    fiducial_cosmology: data/fiducial_cosmology.yml

# if supported by the launcher, restart the pipeline where it left off
# if interrupted
resume: true
# where to put output logs for individual stages
log_dir: data/example/logs_mock_shear
# where to put an overall parsl pipeline log
pipeline_log: data/example/mock_shear_log.txt
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Oops, something went wrong.