author    Weitian LI <liweitianux@gmail.com>  2014-06-18 22:20:59 +0800
committer Weitian LI <liweitianux@gmail.com>  2014-06-18 22:20:59 +0800
commit    e3923265d0d6949407a83726e9a9bd5d97079221 (patch)
tree      9afd8520595f4cf80815b9bccfc3dcf2879ebe47
download  chandra-acis-analysis-e3923265d0d6949407a83726e9a9bd5d97079221.tar.bz2
Initial commit
Added files:
* mass_profile: developed by Junhua GU, modified by Weitian LI
* opt_utilities: developed by Junhua GU
* tools/cosmo_calc: originated from 'calc_distance', modified
* scripts: scripts used to process Chandra ACIS data
* files: useful files used in processing
* HOWTO_chandra_acis_process.txt
* README.md
-rw-r--r--  .gitignore                                     6
-rw-r--r--  HOWTO_chandra_acis_process.txt               150
-rw-r--r--  README.md                                     41
-rw-r--r--  files/empty_v3.json                          109
-rw-r--r--  files/fitting_dbeta_mass.conf                  7
-rw-r--r--  files/fitting_dbeta_sbp.conf                  17
m---------  mass_profile                                   0
-rwxr-xr-x  mod_ly/coolfunc_0.1-2.4_calc.sh              137
-rwxr-xr-x  mod_ly/coolfunc_0.1-2.4_calc_rs.sh           139
-rwxr-xr-x  mod_ly/coolfunc_0.5-2_calc.sh                137
-rwxr-xr-x  mod_ly/coolfunc_0.5-2_calc_rs.sh             139
-rwxr-xr-x  mod_ly/extract_tcool.py                       14
-rwxr-xr-x  mod_ly/fit_beta_nfw_mass_profile.sh          235
-rwxr-xr-x  mod_ly/fit_beta_nfw_mass_profile2.sh         235
-rwxr-xr-x  mod_ly/fit_beta_nfw_mass_profile_simple.sh   135
-rwxr-xr-x  mod_ly/fit_dbeta_nfw_mass_profile.sh         235
-rwxr-xr-x  mod_ly/fit_dbeta_nfw_mass_profile2.sh        235
-rwxr-xr-x  mod_ly/fit_dbeta_nfw_mass_profile_simple.sh  135
-rwxr-xr-x  mod_ly/luminosity_0.1-2.4.sh                  77
-rwxr-xr-x  mod_ly/luminosity_0.1-2.4_check.sh            80
-rwxr-xr-x  mod_ly/luminosity_0.1-2.4_lwt.sh              79
-rwxr-xr-x  mod_ly/luminosity_0.1-2.4_lwt_rs.sh           79
-rwxr-xr-x  mod_ly/luminosity_0.1-2.4_orig.sh             72
-rwxr-xr-x  mod_ly/luminosity_0.5-2.sh                    77
-rwxr-xr-x  mod_ly/luminosity_0.5-2_lwt.sh                79
-rwxr-xr-x  mod_ly/luminosity_0.5-2_lwt_rs.sh             79
-rwxr-xr-x  mod_ly/luminosity_0.5-2_orig.sh               72
-rwxr-xr-x  mod_ly/lx_simple_gxy.sh                      104
-rwxr-xr-x  mod_ly/lx_simple_gxy_rs.sh                   104
-rwxr-xr-x  mod_ly/shuffle_T.py                           18
-rwxr-xr-x  mod_ly/shuffle_sbp.py                         18
m---------  opt_utilities                                  0
-rwxr-xr-x  scripts/calc_lx_simple_v2.sh                 127
-rwxr-xr-x  scripts/chandra_bkg_rescale.sh                54
-rwxr-xr-x  scripts/chandra_ccdgap_rect.py                80
-rwxr-xr-x  scripts/chandra_collect_data_v3.sh           616
-rwxr-xr-x  scripts/chandra_gensbpreg.sh                 135
-rwxr-xr-x  scripts/chandra_genspcreg.sh                 130
-rwxr-xr-x  scripts/chandra_json2csv_v3.py               270
-rwxr-xr-x  scripts/chandra_pb_flux.sh                    43
-rwxr-xr-x  scripts/chandra_update_xcentroid.sh          212
-rwxr-xr-x  scripts/chandra_xcentroid.sh                 317
-rwxr-xr-x  scripts/chandra_xpeak_coord.sh               222
-rwxr-xr-x  scripts/ciao_bkg_spectra_v4.sh               416
-rwxr-xr-x  scripts/ciao_blanksky_v4.sh                  242
-rwxr-xr-x  scripts/ciao_calc_csb.sh                     235
-rwxr-xr-x  scripts/ciao_calc_ct.sh                      593
-rwxr-xr-x  scripts/ciao_calc_ct_csb.sh                   40
-rwxr-xr-x  scripts/ciao_deproj_spectra_v8.sh            670
-rwxr-xr-x  scripts/ciao_expcorr_only.sh                 358
-rwxr-xr-x  scripts/ciao_genregs_v1.sh                   277
-rwxr-xr-x  scripts/ciao_procevt_v2.sh                   182
-rwxr-xr-x  scripts/ciao_r500avgt_v3.sh                  539
-rwxr-xr-x  scripts/ciao_sbp_v3.1.sh                     529
-rwxr-xr-x  scripts/clean_massdir.sh                     219
-rw-r--r--  tools/cosmo_calc/Makefile                     31
-rw-r--r--  tools/cosmo_calc/README.txt                   20
-rw-r--r--  tools/cosmo_calc/VERSION.txt                  10
-rw-r--r--  tools/cosmo_calc/adapt_trapezoid.h           124
-rw-r--r--  tools/cosmo_calc/calc_distance.cc            140
-rw-r--r--  tools/cosmo_calc/calc_distance.h              59
-rw-r--r--  tools/cosmo_calc/calc_distance.o             bin 0 -> 99536 bytes
-rwxr-xr-x  tools/cosmo_calc/cosmo_calc                  bin 0 -> 76999 bytes
-rw-r--r--  tools/cosmo_calc/ddivid.cc                    56
-rw-r--r--  tools/cosmo_calc/ddivid.h                      8
-rw-r--r--  tools/cosmo_calc/ddivid.o                    bin 0 -> 6464 bytes
-rw-r--r--  tools/cosmo_calc/main.cc                     140
-rw-r--r--  tools/cosmo_calc/main.o                      bin 0 -> 35024 bytes
68 files changed, 10138 insertions, 0 deletions
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..1d809cc
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,6 @@
+# .gitignore for chandra_acis_process
+#
+
+*~
+*.swp
+
diff --git a/HOWTO_chandra_acis_process.txt b/HOWTO_chandra_acis_process.txt
new file mode 100644
index 0000000..95a9f0a
--- /dev/null
+++ b/HOWTO_chandra_acis_process.txt
@@ -0,0 +1,150 @@
+###########################################################
+## HOWTO
+## Processing Chandra ACIS data
+##
+## Weitian LI <liweitianux@gmail.com>
+## Last updated: 2014/06/18
+###########################################################
+
+### Settings: {{{
+## directory:
+# chandra scripts:
+CHANDRA_SCRIPT_DIR="${HOME}/bin/chandra/scripts"
+# mass program dir:
+MASS_PROFILE_DIR="${HOME}/bin/chandra/mass_profile"
+
+## aliases:
+# ciao scripts
+alias chcld="${CHANDRA_SCRIPT_DIR}/chandra_collect_data_v3.sh"
+alias chr500="${CHANDRA_SCRIPT_DIR}/ciao_r500avgt_v3.sh"
+# mass_profile related
+alias fitmass="${MASS_PROFILE_DIR}/fit_mass.sh"
+alias fitnfw="${MASS_PROFILE_DIR}/fit_nfw_mass mass_int.dat"
+alias fitsbp="${MASS_PROFILE_DIR}/fit_sbp.sh"
+alias fitwang="${MASS_PROFILE_DIR}/fit_wang2012_model tcl_temp_profile.txt"
+alias calclxfx="${MASS_PROFILE_DIR}/calc_lxfx_simple.sh"
+alias getlxfx="${MASS_PROFILE_DIR}/get_lxfx_data.sh"
+### Settings: }}}
+
+### Websites: {{{
+NED: http://ned.ipac.caltech.edu/forms/byname.html
+Chandra Chaser: http://cda.harvard.edu/chaser/dispatchOcat.do
+SIMBAD: http://simbad.u-strasbg.fr/simbad/
+nH tool: https://heasarc.gsfc.nasa.gov/cgi-bin/Tools/w3nh/w3nh.pl
+### }}}
+
+### Process step-guide: {{{
+ (1) $ chandra_repro indir=. outdir=repro verbose=2
+ (2) $ cd repro; mkdir -p evt bkg img spc/profile mass;
+ (3) $ cd evt; ln -s ../acis*evt2.fits .;
+ (4) $ ciao_procevt_v2.sh
+ (5) $ cd ../bkg; ln -s ../evt/evt2*_clean.fits .;
+ (6) $ ciao_blanksky_v4.sh
+ (7) ds9: open 'evt2*_clean.fits', select an edge region as the
+ *local background*, and save it as 'lbkg.reg';
+ then estimate the photon counts:
+ (8) $ dmlist "evt2_clean.fits[sky=region(lbkg.reg)][energy=400:8000]" blocks
+ (9) $ ciao_bkg_spectra_v4.sh reg="lbkg.reg" basedir=.. nh=<nh> z=<z>
+(10) $ xspec
+(11) xspec> @xspec_lbkg_model.xcm
+ xspec> fit
+ xspec> cpd /xs
+ xspec> pl l del
+ xspec> @~/astro/scripts/xspec_bkgcorr_v2.tcl
+(12) cd ../img; ln -s ../evt/evt2*_clean.fits .;
+(13) ds9: open 'evt2*_clean.fits', select the center of the cluster,
+ and save the region as 'center.reg'
+(14) $ ciao_genregs_v1.sh reg_in=center.reg
+(15) $ cd ../spc/profile;
+ $ ln -s ../../evt/evt2*_clean.fits .;
+ $ ln -s ../../bkg/bkgcorr_blanksky_lbkg.pi .; (maybe 'lbkg.pi')
+ $ ln -s ../../img/rspec.reg img_rspec.reg;
+(16) ds9 open 'evt2*_clean.fits' with regs 'img_rspec.reg';
+ adjust the regions and save as 'rspec.reg'
+(17) create conf file '<NAME>_spec.conf' like:
+ basedir ../..
+ reg rspec.reg
+ bkgd bkgcorr_blanksky_lbkg.pi | lbkg.pi
+ nh <nh>
+ z <redshift>
+(18) $ ciao_deproj_spectra_v8.sh reg="rspec.reg" bkgd=<bkgd> basedir="../.." nh=<nh> z=<z>
+(19) $ xspec
+ xspec> @xspec_deproj.xcm
+ xspec> fit
+ xspec> @~/astro/scripts/xspec_coolfunc_v2.tcl
+ (input 1)
+ xspec> exit
+(20) fix 'NULL' values in 'tcl_temp_profile.qdp' & 'tcl_temp_profile.txt'
+(21) $ xspec
+ xspec> newpar <..>=<..> (bind all kT's and Abundanc's)
+ xspec> fit (get average Temp and Abund)
+ xspec> exit
+(22) $ cd ../../img;
+ create '<NAME>_expcorr.conf' with contents:
+ basedir ..
+ reg sbprofile.reg
+ nh <nh>
+ z <redshift>
+ temp <avg_temp>
+ abund <avg_abund>
+(23) $ ciao_expcorr_sbp_v4.sh basedir=.. nh=<nh> z=<redshift> temp=<avg_temp> abund=<avg_abund>
+## --------------------------------------------------------
+
+(24) $ cd ../mass; clean_massdir.sh;
+(25) check needed linked files:
+ sbprofile.txt, flux_sbp.txt, radius_sbp.txt, tcl_temp_profile.txt
+(26) check needed config files:
+ wang2012_param.txt,
+ fitting_mass.conf, fitting_sbp.conf,
+ fitting_dbeta_mass.conf, fitting_dbeta_sbp.conf,
+ global.cfg
+(27) $ fitwang wang2012_param.txt
+(28) $ qdp fit_result.qdp # check fitted temperature profile
+(29) $ fitsbp fitting_mass.conf # single-beta sbp
+ $ fitsbp fitting_dbeta_mass.conf # double-beta sbp
+(30) $ qdp sbp_fit.qdp # check fitted sbp
+(31) $ ln -s fitting_mass.conf global.cfg # use single-beta
+ $ ln -s fitting_dbeta_mass.conf global.cfg # use double-beta
+(32) $ fitnfw <z> [rmin_kpc]
+(33) $ qdp nfw_fit_result.qdp # check fitted nfw profile, and adjust 'rmin_kpc'
+(34) add 'nfw_rmin_kpc' to 'global.cfg'
+(35) $ fitmass global.cfg c # calculate the center values
+(36) $ fitmass global.cfg 2>&1 | tee global_<date>.log # calculate mass data
+(37) $ chandra_collect_data_v3.sh
+(38) $ cd ../img;
+ $ chandra_update_xcentroid.sh
+(39) $ cd ../spc/profile;
+(40) $ chr500 inner=0.1 outer=0.5 # check 0.5R500 range
+ $ chr500 inner=0.2 outer=0.5
+(41) $ xspec
+ xspec> @xspec_r500avgt_0.1-0.5.xcm
+ xspec> fit; fit;
+ xspec> cpd /xs; pl l del;
+ xspec> error 1. 2 3 # calc errors (68%) of kT and Abundanc
+(42) update the data of 'T(0.1-0.5 R500)', 'Z(0.1-0.5 R500)' ...
+
+## --------------------------------------------------------
+(43) $ cd ../..; # in 'repro' dir
+ $ cp -a mass lxfx; cd lxfx;
+(44) $ calclxfx global.cfg c 500 200 # 'c' for center values
+ $ calclxfx global.cfg 500 200 # calc 'errors'
+(45) $ getlxfx . c 500 200 # for center results
+ $ getlxfx . 500 200 # for all results (with errors)
+
+## --------------------------------------------------------
+(46) $ cd ..; cd spc/profile;
+ calculate cooling time and Csb value:
+ $ ciao_calc_ct_csb.sh
+ check whether the region exceeds the CCD edge, and answer y/n
+ check results in file 'ct_csb_results.txt'
+ ---
+ Alternatively:
+ $ ciao_calc_ct.sh
+ $ ciao_calc_csb.sh
+ check whether the region exceeds the CCD edge, and answer y/n
+ check results in file 'cooling_results.txt' and 'csb_results.txt'
+ ---
+### process }}}
+
+
+# vim: set ts=8 sw=4 tw=0 fenc=utf-8 ft=txt: #
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..9c6cbdb
--- /dev/null
+++ b/README.md
@@ -0,0 +1,41 @@
+README.md
+=========
+
+
+Install/Compile
+---------------
+(1) mass_profile
+$ cd mass_profile
+$ make clean
+$ heainit # initialize heasoft, to link libpgplot.a and libcpgplot.a
+$ ./init.sh
+$ make
+(2) cosmo_calc
+$ cd tools/cosmo_calc
+$ make clean; make
+$ cp cosmo_calc ~/bin # within $PATH
+
+
+Settings
+--------
+## Add the following settings to ~/.bashrc
+# environment variables:
+export MASS_PROFILE_DIR="/path/to/mass_profile"
+export CHANDRA_SCRIPT_DIR="/path/to/script"
+# aliases
+# ciao scripts
+alias chcld="${CHANDRA_SCRIPT_DIR}/chandra_collect_data_v3.sh"
+alias chr500="${CHANDRA_SCRIPT_DIR}/ciao_r500avgt_v3.sh"
+# mass_profile related
+alias fitmass="${MASS_PROFILE_DIR}/fit_mass.sh"
+alias fitnfw="${MASS_PROFILE_DIR}/fit_nfw_mass mass_int.dat"
+alias fitsbp="${MASS_PROFILE_DIR}/fit_sbp.sh"
+alias fitwang="${MASS_PROFILE_DIR}/fit_wang2012_model tcl_temp_profile.txt"
+alias calclxfx="${MASS_PROFILE_DIR}/calc_lxfx_simple.sh"
+alias getlxfx="${MASS_PROFILE_DIR}/get_lxfx_data.sh"
+
+
+Usage
+-----
+See 'HOWTO_chandra_acis_process.txt' for the detailed processing steps.
+
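As a quick sketch of how these settings are used once added to ~/.bashrc (the
observation directory below is a placeholder; the detailed steps are in
HOWTO_chandra_acis_process.txt):

$ source ~/.bashrc              # pick up the exported variables and aliases
$ cd <obsid>/repro/mass         # the 'mass' directory prepared per the HOWTO
$ fitwang wang2012_param.txt    # fit the temperature profile (HOWTO step 27)
$ fitsbp fitting_mass.conf      # fit the surface brightness profile (HOWTO step 29)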
diff --git a/files/empty_v3.json b/files/empty_v3.json
new file mode 100644
index 0000000..1095c1e
--- /dev/null
+++ b/files/empty_v3.json
@@ -0,0 +1,109 @@
+{
+ "Obs. ID": ,
+ "Source Name": "",
+ "Obs. Date": "",
+ "Detector": "",
+ "Exposure (ks)": ,
+ "Clean Exposure (ks)": ,
+ "R. A.": "",
+ "Dec.": "",
+ "nH (10^22 cm^-2)": ,
+ "redshift": ,
+ "E(z)": ,
+ "T_ref (keV)": ,
+ "Z_ref (solar)": ,
+ "Rmax_SBP (pixel)": ,
+ "Rmax_Tpro (pixel)": ,
+ "Rmax_SBP (kpc)": ,
+ "Rmax_Tpro (kpc)": ,
+ "NFW_Rmin (kpc)": ,
+ "Model_SBP": "",
+ "n01": ,
+ "beta1": ,
+ "rc1": ,
+ "rc1_kpc": ,
+ "n02": ,
+ "beta2": ,
+ "rc2": ,
+ "rc2_kpc": ,
+ "bkg": ,
+ "R200 (kpc)": ,
+ "R200_err_lower (1sigma)": ,
+ "R200_err_upper (1sigma)": ,
+ "M200 (M_sun)": ,
+ "M200_err_lower (1sigma)": ,
+ "M200_err_upper (1sigma)": ,
+ "L200 (erg/s)": ,
+ "L200_err (1sigma)": ,
+ "M_gas200 (M_sun)": ,
+ "M_gas200_err_lower (1sigma)": ,
+ "M_gas200_err_upper (1sigma)": ,
+ "F_gas200": ,
+ "F_gas200_err_lower (1sigma)": ,
+ "F_gas200_err_upper (1sigma)": ,
+ "R500 (kpc)": ,
+ "R500_err_lower (1sigma)": ,
+ "R500_err_upper (1sigma)": ,
+ "M500 (M_sun)": ,
+ "M500_err_lower (1sigma)": ,
+ "M500_err_upper (1sigma)": ,
+ "L500 (erg/s)": ,
+ "L500_err (1sigma)": ,
+ "M_gas500 (M_sun)": ,
+ "M_gas500_err_lower (1sigma)": ,
+ "M_gas500_err_upper (1sigma)": ,
+ "F_gas500": ,
+ "F_gas500_err_lower (1sigma)": ,
+ "F_gas500_err_upper (1sigma)": ,
+ "R1500": ,
+ "R1500_err_lower": ,
+ "R1500_err_upper": ,
+ "M1500": ,
+ "M1500_err_lower": ,
+ "M1500_err_upper": ,
+ "L1500": ,
+ "L1500_err": ,
+ "M_gas1500": ,
+ "M_gas1500_err_lower": ,
+ "M_gas1500_err_upper": ,
+ "F_gas1500": ,
+ "F_gas1500_err_lower": ,
+ "F_gas1500_err_upper": ,
+ "R2500": ,
+ "R2500_err_lower": ,
+ "R2500_err_upper": ,
+ "M2500": ,
+ "M2500_err_lower": ,
+ "M2500_err_upper": ,
+ "L2500": ,
+ "L2500_err": ,
+ "M_gas2500": ,
+ "M_gas2500_err_lower": ,
+ "M_gas2500_err_upper": ,
+ "F_gas2500": ,
+ "F_gas2500_err_lower": ,
+ "F_gas2500_err_upper": ,
+ "T(0.1-0.5 R500)": ,
+ "T_err(0.1-0.5 R500)": ,
+ "T_err_l(0.1-0.5 R500)": ,
+ "T_err_u(0.1-0.5 R500)": ,
+ "Z(0.1-0.5 R500)": ,
+ "Z_err(0.1-0.5 R500)": ,
+ "Z_err_l(0.1-0.5 R500)": ,
+ "Z_err_u(0.1-0.5 R500)": ,
+ "T(0.2-0.5 R500)": ,
+ "T_err(0.2-0.5 R500)": ,
+ "T_err_l(0.2-0.5 R500)": ,
+ "T_err_u(0.2-0.5 R500)": ,
+ "Z(0.2-0.5 R500)": ,
+ "Z_err(0.2-0.5 R500)": ,
+ "Z_err_l(0.2-0.5 R500)": ,
+ "Z_err_u(0.2-0.5 R500)": ,
+ "F_gas(R2500-R500)": ,
+ "F_gas_err_l(R2500-R500)": ,
+ "F_gas_err_u(R2500-R500)": ,
+ "R_cool (kpc)": ,
+ "Cooling_time (Gyr)": ,
+ "Cool_core": ,
+ "NOTE": ""
+},
diff --git a/files/fitting_dbeta_mass.conf b/files/fitting_dbeta_mass.conf
new file mode 100644
index 0000000..4bd2521
--- /dev/null
+++ b/files/fitting_dbeta_mass.conf
@@ -0,0 +1,7 @@
+t_profile wang2012
+t_data_file tcl_temp_profile.txt
+t_param_file wang2012_param.txt
+sbp_cfg fitting_dbeta_sbp.conf
+nh 0.
+abund 0.
+radius_sbp_file sbprofile.txt
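Note that 'nh' and 'abund' above are template placeholders (0.) to be replaced
with the target's nH and average abundance (cf. HOWTO steps 17 and 22); the
fit_*_nfw_mass_profile.sh scripts below read them from this config and pass
them to the cooling-function calculation, e.g.:

nh=`grep '^nh' ${cfg_file} | awk '{print $2}'`
abund=`grep '^abund' ${cfg_file} | awk '{print $2}'`
$base_path/coolfunc_calc.sh ${T_file_center} $abund $nh $z $cfunc_file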
diff --git a/files/fitting_dbeta_sbp.conf b/files/fitting_dbeta_sbp.conf
new file mode 100644
index 0000000..d62bae8
--- /dev/null
+++ b/files/fitting_dbeta_sbp.conf
@@ -0,0 +1,17 @@
+radius_file radius_sbp.txt
+sbp_file flux_sbp.txt
+
+cfunc_file coolfunc_calc_data.txt
+T_file t_profile_dump.qdp
+
+n01 0.005
+rc1 30
+beta1 0.7
+n02 0.005
+rc2 300
+beta2 0.7
+bkg 0.0
+
+cm_per_pixel
+z
+
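Likewise, 'z' above must be filled in with the source redshift, while the blank
'cm_per_pixel' is computed and substituted automatically by the fitting scripts
(excerpt from fit_beta_nfw_mass_profile.sh below):

z=`grep '^z' $sbp_cfg | awk '{ print $2 }'`
cm_per_pixel=`${base_path}/calc_distance ${z} | grep 'cm_per_pixel' | awk '{ print $2 }'`
sed -i'' "s/^cm_per_pixel.*$/cm_per_pixel ${cm_per_pixel}/" ${sbp_cfg}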
diff --git a/mass_profile b/mass_profile
new file mode 160000
+Subproject 14d38f35ce25cf3ca391ec45467243dd819f043
diff --git a/mod_ly/coolfunc_0.1-2.4_calc.sh b/mod_ly/coolfunc_0.1-2.4_calc.sh
new file mode 100755
index 0000000..7dc1a6a
--- /dev/null
+++ b/mod_ly/coolfunc_0.1-2.4_calc.sh
@@ -0,0 +1,137 @@
+#!/bin/sh
+#
+# unalias -a
+#
+###########################################################
+## Task: ##
+## Calc `cooling function' data according to ##
+## given `temperature profile' ##
+## ##
+## NOTE: ##
+## given `tprofile': <radius> <temperature> ##
+## calc `cooling function' by invoking `XSPEC' ##
+## using model `wabs*apec' ##
+## ##
+## LIweitiaNux <liweitianux@gmail.com> ##
+## August 17, 2012 ##
+###########################################################
+
+## cmdline arguments {{{
+if [ $# -ne 5 ]; then
+ printf "usage:\n"
+ printf " `basename $0` <tprofile> <avg_abund> <nH> <redshift> <coolfunc_outfile>\n"
+ exit 1
+fi
+base_path=`dirname $0`
+TPROFILE=$1
+ABUND_VAL=$2
+N_H=$3
+REDSHIFT=$4
+
+NORM=`$base_path/calc_distance $REDSHIFT|grep norm|awk '{print $2}'`
+echo $NORM
+
+COOLFUNC_DAT=$5
+COOLFUNC_DAT_RATIO=flux_cnt_ratio.txt
+
+if [ ! -r "${TPROFILE}" ]; then
+ printf "ERROR: given tprofile '${TPROFILE}' NOT accessible\n"
+ exit 2
+fi
+[ -e "${COOLFUNC_DAT}" ] && rm -f ${COOLFUNC_DAT}
+[ -e "${COOLFUNC_DAT_RATIO}" ] && rm -f ${COOLFUNC_DAT_RATIO}
+## arguments }}}
+
+## specify the variable name outside the while loop;
+## otherwise variables set inside it are not visible outside
+XSPEC_CF_XCM="_coolfunc_calc.xcm"
+[ -e "${XSPEC_CF_XCM}" ] && rm -f ${XSPEC_CF_XCM}
+
+## generate xspec script {{{
+cat >> ${XSPEC_CF_XCM} << _EOF_
+## XSPEC Tcl script
+## calc cooling function data
+##
+## generated by: `basename $0`
+## date: `date`
+
+set xs_return_results 1
+set xs_echo_script 0
+# set tcl_precision 12
+dummyrsp .01 100 4096
+## set basic data {{{
+set nh ${N_H}
+set redshift ${REDSHIFT}
+set abund_val ${ABUND_VAL}
+set norm ${NORM}
+## basic }}}
+
+## xspec related {{{
+# debug settings {{{
+chatter 0
+# debug }}}
+query yes
+abund grsa
+dummyrsp 0.3 11.0 1024
+# load model 'wabs*apec' to calc cooling function
+model wabs*apec & \${nh} & 1.0 & \${abund_val} & \${redshift} & \${norm} & /*
+## xspec }}}
+
+## set input and output filename
+set tpro_fn "${TPROFILE}"
+set cf_fn "${COOLFUNC_DAT}"
+set cff_fn "${COOLFUNC_DAT_RATIO}"
+if { [ file exists \${cf_fn} ] } {
+ exec rm -fv \${cf_fn}
+}
+
+if { [ file exists \${cff_fn} ] } {
+ exec rm -fv \${cff_fn}
+}
+
+## open files
+set tpro_fd [ open \${tpro_fn} r ]
+set cf_fd [ open \${cf_fn} w ]
+set cff_fd [ open \${cff_fn} w ]
+
+## read data from tprofile line by line
+while { [ gets \${tpro_fd} tpro_line ] != -1 } {
+ # gets one line
+ scan \${tpro_line} "%f %f" radius temp_val
+ #puts "radius: \${radius}, temperature: \${temp_val}"
+ # set temperature value
+ newpar 2 \${temp_val}
+ # calc flux & tclout
+ flux 0.7 7.0
+ tclout flux 1
+ scan \${xspec_tclout} "%f %f %f %f" holder holder holder cf_data
+ #puts "cf_data: \${cf_data}"
+ puts \${cf_fd} "\${radius} \${cf_data}"
+ flux 0.1 2.4
+ tclout flux 1
+ scan \${xspec_tclout} "%f %f %f %f" cff_data holder holder holder
+ puts \${cff_fd} "\${radius} [expr \${cff_data}/\${cf_data}]"
+}
+
+## close opened files
+close \${tpro_fd}
+close \${cf_fd}
+close \${cff_fd}
+
+## exit
+tclexit
+_EOF_
+
+## extract xcm }}}
+
+## invoke xspec to calc
+printf "invoking XSPEC to calculate cooling function data ...\n"
+# xspec - ${XSPEC_CF_XCM}
+xspec - ${XSPEC_CF_XCM} > /dev/null
+
+## clean
+# if [ -e "${XSPEC_CF_XCM}" ]; then
+# rm -f ${XSPEC_CF_XCM}
+# fi
+
+exit 0
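A minimal invocation sketch for the script above; the abundance, nH, and
redshift values are illustrative placeholders only, and 'calc_distance' is
assumed to sit next to the script (it is invoked via $base_path):

$ ./coolfunc_0.1-2.4_calc.sh tcl_temp_profile.txt 0.3 0.05 0.1 coolfunc_calc_data.txt

This writes the <radius> <cooling function> table to 'coolfunc_calc_data.txt'
and a companion ratio table to 'flux_cnt_ratio.txt', which is consumed later
by calc_lx in the fit_*_nfw_mass_profile.sh scripts.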
diff --git a/mod_ly/coolfunc_0.1-2.4_calc_rs.sh b/mod_ly/coolfunc_0.1-2.4_calc_rs.sh
new file mode 100755
index 0000000..262e8ed
--- /dev/null
+++ b/mod_ly/coolfunc_0.1-2.4_calc_rs.sh
@@ -0,0 +1,139 @@
+#!/bin/sh
+#
+# unalias -a
+#
+###########################################################
+## Task: ##
+## Calc `cooling function' data according to ##
+## given `temperature profile' ##
+## ##
+## NOTE: ##
+## given `tprofile': <radius> <temperature> ##
+## calc `cooling function' by invoking `XSPEC' ##
+## using model `wabs*raymond' ##
+## ##
+## LIweitiaNux <liweitianux@gmail.com> ##
+## August 17, 2012 ##
+###########################################################
+
+## cmdline arguments {{{
+if [ $# -ne 5 ]; then
+ printf "usage:\n"
+ printf " `basename $0` <tprofile> <avg_abund> <nH> <redshift> <coolfunc_outfile>\n"
+ exit 1
+fi
+base_path=`dirname $0`
+TPROFILE=$1
+ABUND_VAL=$2
+N_H=$3
+REDSHIFT=$4
+
+NORM=`$base_path/calc_distance $REDSHIFT|grep norm|awk '{print $2}'`
+echo $NORM
+
+COOLFUNC_DAT=$5
+COOLFUNC_DAT_RATIO=flux_cnt_ratio.txt
+
+if [ ! -r "${TPROFILE}" ]; then
+ printf "ERROR: given tprofile '${TPROFILE}' NOT accessible\n"
+ exit 2
+fi
+[ -e "${COOLFUNC_DAT}" ] && rm -f ${COOLFUNC_DAT}
+[ -e "${COOLFUNC_DAT_RATIO}" ] && rm -f ${COOLFUNC_DAT_RATIO}
+## arguments }}}
+
+## specify the variable name outside the while loop;
+## otherwise variables set inside it are not visible outside
+XSPEC_CF_XCM="_coolfunc_calc.xcm"
+[ -e "${XSPEC_CF_XCM}" ] && rm -f ${XSPEC_CF_XCM}
+
+## generate xspec script {{{
+cat >> ${XSPEC_CF_XCM} << _EOF_
+## XSPEC Tcl script
+## calc cooling function data
+##
+## generated by: `basename $0`
+## date: `date`
+
+set xs_return_results 1
+set xs_echo_script 0
+# set tcl_precision 12
+dummyrsp .01 100 4096
+## set basic data {{{
+set nh ${N_H}
+set redshift ${REDSHIFT}
+set abund_val ${ABUND_VAL}
+set norm ${NORM}
+## basic }}}
+
+## xspec related {{{
+# debug settings {{{
+chatter 0
+# debug }}}
+query yes
+abund grsa
+dummyrsp 0.3 11.0 1024
+# # load model 'wabs*apec' to calc cooling function
+# model wabs*apec & \${nh} & 1.0 & \${abund_val} & \${redshift} & \${norm} & /*
+# model 'wabs*raymond'
+model wabs*raymond & \${nh} & 1.0 & \${abund_val} & \${redshift} & \${norm} & /*
+## xspec }}}
+
+## set input and output filename
+set tpro_fn "${TPROFILE}"
+set cf_fn "${COOLFUNC_DAT}"
+set cff_fn "${COOLFUNC_DAT_RATIO}"
+if { [ file exists \${cf_fn} ] } {
+ exec rm -fv \${cf_fn}
+}
+
+if { [ file exists \${cff_fn} ] } {
+ exec rm -fv \${cff_fn}
+}
+
+## open files
+set tpro_fd [ open \${tpro_fn} r ]
+set cf_fd [ open \${cf_fn} w ]
+set cff_fd [ open \${cff_fn} w ]
+
+## read data from tprofile line by line
+while { [ gets \${tpro_fd} tpro_line ] != -1 } {
+ # gets one line
+ scan \${tpro_line} "%f %f" radius temp_val
+ #puts "radius: \${radius}, temperature: \${temp_val}"
+ # set temperature value
+ newpar 2 \${temp_val}
+ # calc flux & tclout
+ flux 0.7 7.0
+ tclout flux 1
+ scan \${xspec_tclout} "%f %f %f %f" holder holder holder cf_data
+ #puts "cf_data: \${cf_data}"
+ puts \${cf_fd} "\${radius} \${cf_data}"
+ flux 0.1 2.4
+ tclout flux 1
+ scan \${xspec_tclout} "%f %f %f %f" cff_data holder holder holder
+ puts \${cff_fd} "\${radius} [expr \${cff_data}/\${cf_data}]"
+}
+
+## close opened files
+close \${tpro_fd}
+close \${cf_fd}
+close \${cff_fd}
+
+## exit
+tclexit
+_EOF_
+
+## extract xcm }}}
+
+## invoke xspec to calc
+printf "invoking XSPEC to calculate cooling function data ...\n"
+# xspec - ${XSPEC_CF_XCM}
+xspec - ${XSPEC_CF_XCM} > /dev/null
+
+## clean
+# if [ -e "${XSPEC_CF_XCM}" ]; then
+# rm -f ${XSPEC_CF_XCM}
+# fi
+
+exit 0
diff --git a/mod_ly/coolfunc_0.5-2_calc.sh b/mod_ly/coolfunc_0.5-2_calc.sh
new file mode 100755
index 0000000..889a4e3
--- /dev/null
+++ b/mod_ly/coolfunc_0.5-2_calc.sh
@@ -0,0 +1,137 @@
+#!/bin/sh
+#
+# unalias -a
+#
+###########################################################
+## Task: ##
+## Calc `cooling function' data according to ##
+## given `temperature profile' ##
+## ##
+## NOTE: ##
+## given `tprofile': <radius> <temperature> ##
+## calc `cooling function' by invoking `XSPEC' ##
+## using model `wabs*apec' ##
+## ##
+## LIweitiaNux <liweitianux@gmail.com> ##
+## August 17, 2012 ##
+###########################################################
+
+## cmdline arguments {{{
+if [ $# -ne 5 ]; then
+ printf "usage:\n"
+ printf " `basename $0` <tprofile> <avg_abund> <nH> <redshift> <coolfunc_outfile>\n"
+ exit 1
+fi
+base_path=`dirname $0`
+TPROFILE=$1
+ABUND_VAL=$2
+N_H=$3
+REDSHIFT=$4
+
+NORM=`$base_path/calc_distance $REDSHIFT|grep norm|awk '{print $2}'`
+echo $NORM
+
+COOLFUNC_DAT=$5
+COOLFUNC_DAT_RATIO=flux_cnt_ratio.txt
+
+if [ ! -r "${TPROFILE}" ]; then
+ printf "ERROR: given tprofile '${TPROFILE}' NOT accessible\n"
+ exit 2
+fi
+[ -e "${COOLFUNC_DAT}" ] && rm -f ${COOLFUNC_DAT}
+[ -e "${COOLFUNC_DAT_RATIO}" ] && rm -f ${COOLFUNC_DAT_RATIO}
+## arguments }}}
+
+## specify the variable name outside the while loop;
+## otherwise variables set inside it are not visible outside
+XSPEC_CF_XCM="_coolfunc_calc.xcm"
+[ -e "${XSPEC_CF_XCM}" ] && rm -f ${XSPEC_CF_XCM}
+
+## generate xspec script {{{
+cat >> ${XSPEC_CF_XCM} << _EOF_
+## XSPEC Tcl script
+## calc cooling function data
+##
+## generated by: `basename $0`
+## date: `date`
+
+set xs_return_results 1
+set xs_echo_script 0
+# set tcl_precision 12
+dummyrsp .01 100 4096
+## set basic data {{{
+set nh ${N_H}
+set redshift ${REDSHIFT}
+set abund_val ${ABUND_VAL}
+set norm ${NORM}
+## basic }}}
+
+## xspec related {{{
+# debug settings {{{
+chatter 0
+# debug }}}
+query yes
+abund grsa
+dummyrsp 0.3 11.0 1024
+# load model 'wabs*apec' to calc cooling function
+model wabs*apec & \${nh} & 1.0 & \${abund_val} & \${redshift} & \${norm} & /*
+## xspec }}}
+
+## set input and output filename
+set tpro_fn "${TPROFILE}"
+set cf_fn "${COOLFUNC_DAT}"
+set cff_fn "${COOLFUNC_DAT_RATIO}"
+if { [ file exists \${cf_fn} ] } {
+ exec rm -fv \${cf_fn}
+}
+
+if { [ file exists \${cff_fn} ] } {
+ exec rm -fv \${cff_fn}
+}
+
+## open files
+set tpro_fd [ open \${tpro_fn} r ]
+set cf_fd [ open \${cf_fn} w ]
+set cff_fd [ open \${cff_fn} w ]
+
+## read data from tprofile line by line
+while { [ gets \${tpro_fd} tpro_line ] != -1 } {
+ # gets one line
+ scan \${tpro_line} "%f %f" radius temp_val
+ #puts "radius: \${radius}, temperature: \${temp_val}"
+ # set temperature value
+ newpar 2 \${temp_val}
+ # calc flux & tclout
+ flux 0.7 7.0
+ tclout flux 1
+ scan \${xspec_tclout} "%f %f %f %f" holder holder holder cf_data
+ #puts "cf_data: \${cf_data}"
+ puts \${cf_fd} "\${radius} \${cf_data}"
+ flux 0.5 2.0
+ tclout flux 1
+ scan \${xspec_tclout} "%f %f %f %f" cff_data holder holder holder
+ puts \${cff_fd} "\${radius} [expr \${cff_data}/\${cf_data}]"
+}
+
+## close opened files
+close \${tpro_fd}
+close \${cf_fd}
+close \${cff_fd}
+
+## exit
+tclexit
+_EOF_
+
+## extract xcm }}}
+
+## invoke xspec to calc
+printf "invoking XSPEC to calculate cooling function data ...\n"
+# xspec - ${XSPEC_CF_XCM}
+xspec - ${XSPEC_CF_XCM} > /dev/null
+
+## clean
+# if [ -e "${XSPEC_CF_XCM}" ]; then
+# rm -f ${XSPEC_CF_XCM}
+# fi
+
+exit 0
diff --git a/mod_ly/coolfunc_0.5-2_calc_rs.sh b/mod_ly/coolfunc_0.5-2_calc_rs.sh
new file mode 100755
index 0000000..3d8933b
--- /dev/null
+++ b/mod_ly/coolfunc_0.5-2_calc_rs.sh
@@ -0,0 +1,139 @@
+#!/bin/sh
+#
+# unalias -a
+#
+###########################################################
+## Task: ##
+## Calc `cooling function' data according to ##
+## given `temperature profile' ##
+## ##
+## NOTE: ##
+## given `tprofile': <radius> <temperature> ##
+## calc `cooling function' by invoking `XSPEC' ##
+## using model `wabs*raymond' ##
+## ##
+## LIweitiaNux <liweitianux@gmail.com> ##
+## August 17, 2012 ##
+###########################################################
+
+## cmdline arguments {{{
+if [ $# -ne 5 ]; then
+ printf "usage:\n"
+ printf " `basename $0` <tprofile> <avg_abund> <nH> <redshift> <coolfunc_outfile>\n"
+ exit 1
+fi
+base_path=`dirname $0`
+TPROFILE=$1
+ABUND_VAL=$2
+N_H=$3
+REDSHIFT=$4
+
+NORM=`$base_path/calc_distance $REDSHIFT|grep norm|awk '{print $2}'`
+echo $NORM
+
+COOLFUNC_DAT=$5
+COOLFUNC_DAT_RATIO=flux_cnt_ratio.txt
+
+if [ ! -r "${TPROFILE}" ]; then
+ printf "ERROR: given tprofile '${TPROFILE}' NOT accessible\n"
+ exit 2
+fi
+[ -e "${COOLFUNC_DAT}" ] && rm -f ${COOLFUNC_DAT}
+[ -e "${COOLFUNC_DAT_RATIO}" ] && rm -f ${COOLFUNC_DAT_RATIO}
+## arguments }}}
+
+## specify the variable name outside the while loop;
+## otherwise variables set inside it are not visible outside
+XSPEC_CF_XCM="_coolfunc_calc.xcm"
+[ -e "${XSPEC_CF_XCM}" ] && rm -f ${XSPEC_CF_XCM}
+
+## generate xspec script {{{
+cat >> ${XSPEC_CF_XCM} << _EOF_
+## XSPEC Tcl script
+## calc cooling function data
+##
+## generated by: `basename $0`
+## date: `date`
+
+set xs_return_results 1
+set xs_echo_script 0
+# set tcl_precision 12
+dummyrsp .01 100 4096
+## set basic data {{{
+set nh ${N_H}
+set redshift ${REDSHIFT}
+set abund_val ${ABUND_VAL}
+set norm ${NORM}
+## basic }}}
+
+## xspec related {{{
+# debug settings {{{
+chatter 0
+# debug }}}
+query yes
+abund grsa
+dummyrsp 0.3 11.0 1024
+# # load model 'wabs*apec' to calc cooling function
+# model wabs*apec & \${nh} & 1.0 & \${abund_val} & \${redshift} & \${norm} & /*
+# model 'wabs*raymond'
+model wabs*raymond & \${nh} & 1.0 & \${abund_val} & \${redshift} & \${norm} & /*
+## xspec }}}
+
+## set input and output filename
+set tpro_fn "${TPROFILE}"
+set cf_fn "${COOLFUNC_DAT}"
+set cff_fn "${COOLFUNC_DAT_RATIO}"
+if { [ file exists \${cf_fn} ] } {
+ exec rm -fv \${cf_fn}
+}
+
+if { [ file exists \${cff_fn} ] } {
+ exec rm -fv \${cff_fn}
+}
+
+## open files
+set tpro_fd [ open \${tpro_fn} r ]
+set cf_fd [ open \${cf_fn} w ]
+set cff_fd [ open \${cff_fn} w ]
+
+## read data from tprofile line by line
+while { [ gets \${tpro_fd} tpro_line ] != -1 } {
+ # gets one line
+ scan \${tpro_line} "%f %f" radius temp_val
+ #puts "radius: \${radius}, temperature: \${temp_val}"
+ # set temperature value
+ newpar 2 \${temp_val}
+ # calc flux & tclout
+ flux 0.7 7.0
+ tclout flux 1
+ scan \${xspec_tclout} "%f %f %f %f" holder holder holder cf_data
+ #puts "cf_data: \${cf_data}"
+ puts \${cf_fd} "\${radius} \${cf_data}"
+ flux 0.5 2.0
+ tclout flux 1
+ scan \${xspec_tclout} "%f %f %f %f" cff_data holder holder holder
+ puts \${cff_fd} "\${radius} [expr \${cff_data}/\${cf_data}]"
+}
+
+## close opened files
+close \${tpro_fd}
+close \${cf_fd}
+close \${cff_fd}
+
+## exit
+tclexit
+_EOF_
+
+## extract xcm }}}
+
+## invoke xspec to calc
+printf "invoking XSPEC to calculate cooling function data ...\n"
+# xspec - ${XSPEC_CF_XCM}
+xspec - ${XSPEC_CF_XCM} > /dev/null
+
+## clean
+# if [ -e "${XSPEC_CF_XCM}" ]; then
+# rm -f ${XSPEC_CF_XCM}
+# fi
+
+exit 0
diff --git a/mod_ly/extract_tcool.py b/mod_ly/extract_tcool.py
new file mode 100755
index 0000000..05c836b
--- /dev/null
+++ b/mod_ly/extract_tcool.py
@@ -0,0 +1,14 @@
+#!/usr/bin/env python
+
+import sys
+rcool = float(sys.argv[1])
+
+for l in open('cooling_time.dat'):
+    r, t = l.split()
+    r = float(r)
+    t = float(t)
+    if r > rcool:
+        print("cooling time at %f kpc=%f Gyr" % (rcool, t))
+        print("rcool= %f kpc" % rcool)
+        print("cooling_time= %f Gyr @ %f kpc" % (t, rcool))
+        sys.exit(0)
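For context, the fit_*_nfw_mass_profile.sh scripts below drive this helper as
follows: 'cooling_time.dat' comes from the 'cooling_time' tool, rcool is taken
as 0.048 * r500, and the result is logged to 'cooling_time_result.txt':

$base_path/cooling_time rho_fit_center.dat ${T_file_center} cfunc_bolo.dat $dl $cm_per_pixel > cooling_time.dat
rcool=`$base_path/analyze_mass_profile.py 500 c | grep '^r500' | awk -F "=" '{print .048*$2}'`
$base_path/extract_tcool.py $rcool | tee cooling_time_result.txt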
diff --git a/mod_ly/fit_beta_nfw_mass_profile.sh b/mod_ly/fit_beta_nfw_mass_profile.sh
new file mode 100755
index 0000000..8da6070
--- /dev/null
+++ b/mod_ly/fit_beta_nfw_mass_profile.sh
@@ -0,0 +1,235 @@
+#!/bin/sh
+
+# modified by LIweitaNux, 2012/09/06
+export PATH="/usr/local/bin:/usr/bin:/bin:$PATH"
+
+if [ $# -eq 1 ]; then
+ cfg_file="$1"
+elif [ $# -eq 2 ]; then
+ cfg_file="$1"
+ CENTER_VAL="YES"
+else
+ echo "Usage: $0 <cfg_file> [c]"
+ exit 1
+fi
+
+if ! which xspec > /dev/null; then
+ printf "ERROR: please initialize HEASOFT first\n"
+ exit 2
+fi
+
+if [ -z "${HEADAS}" ]; then
+ printf "ERROR: variable \`HEADAS' not properly set\n"
+ exit 3
+fi
+
+export PGPLOT_FONT="${HEADAS}/lib/grfont.dat"
+printf "## PGPLOT_FONT: \`${PGPLOT_FONT}'\n"
+
+if [ "$0" = `basename $0` ]; then
+ script_path=`which $0`
+ base_path=`dirname ${script_path}`
+else
+ base_path=`dirname $0`
+fi
+printf "## base_path: \`${base_path}'\n"
+printf "## use configuration file: \`${cfg_file}'\n"
+
+#initialize profile type name
+t_profile_type=`grep '^t_profile' $cfg_file | awk '{print $2}'`
+printf "## t_profile_type: \`$t_profile_type'\n"
+#initialize data file name
+t_data_file=`grep '^t_data_file' $cfg_file | awk '{print $2}'`
+t_param_file=`grep '^t_param_file' $cfg_file | awk '{print $2}'`
+#initialize sbp config file
+sbp_cfg=`grep '^sbp_cfg' $cfg_file | awk '{print $2}'`
+#initialize the temperature profile file
+T_file=`grep '^T_file' $sbp_cfg | awk '{print $2}'`
+cfunc_file=`grep '^cfunc_file' ${sbp_cfg} |awk '{print $2}'`
+abund=`grep '^abund' ${cfg_file} |awk '{print $2}'`
+nh=`grep '^nh' ${cfg_file} |awk '{print $2}'`
+## calc `cm_per_pixel' instead {{{
+# cm_per_pixel=`grep '^cm_per_pixel' $sbp_cfg|awk '{print $2}'`
+z=`grep '^z' $sbp_cfg | awk '{ print $2 }'`
+cm_per_pixel=`${base_path}/calc_distance ${z} | grep 'cm_per_pixel' | awk '{ print $2 }'`
+sed -i'' "s/^cm_per_pixel.*$/cm_per_pixel ${cm_per_pixel}/" ${sbp_cfg}
+printf "## redshift: ${z}, cm_per_pixel: ${cm_per_pixel}\n"
+## cm_per_pixel }}}
+da=`python -c "print($cm_per_pixel/(.492/3600/180*3.1415926))"`
+dl=`python -c "print($da*(1+$z)**2)"`
+printf "da= ${da}\n"
+printf "dl= ${dl}\n"
+## sbp {{{
+sbp_data_file=`grep '^sbp_file' $sbp_cfg | awk '{print $2}'`
+radius_sbp_file=`grep '^radius_sbp_file' ${cfg_file} | awk '{print $2}'`
+
+if [ "x$radius_sbp_file" = "x" ]; then
+ echo "ERROR: radius_sbp_file must be assigned; it should be a 4-column file containing radius, radius err, sbp, and sbp err"
+ exit 200
+fi
+
+TMP_RSBP="_tmp_rsbp.txt"
+[ -e "${TMP_RSBP}" ] && rm -f ${TMP_RSBP}
+cat ${radius_sbp_file} | sed 's/#.*$//' | grep -Ev '^\s*$' > ${TMP_RSBP}
+# mv -f _tmp_rsbp.txt ${radius_sbp_file}
+radius_sbp_file="${TMP_RSBP}"
+## sbp }}}
+
+# determine which temperature profile to be used, and fit the T profile {{{
+if [ "$t_profile_type" = "zyy" ]; then
+ $base_path/fit_zyy_model $t_data_file $t_param_file $cm_per_pixel
+ # mv -f zyy_dump.qdp ${T_file}
+ mv -f zyy_dump.qdp zyy_dump_center.qdp
+elif [ "$t_profile_type" = "m0603246" ]; then
+ $base_path/fit_m0603246 $t_data_file $cm_per_pixel
+ # mv -f m0603246_dump.qdp ${T_file}
+ mv -f m0603246_dump.qdp m0603246_dump_center.qdp
+elif [ "$t_profile_type" = "wang2012" ]; then
+ $base_path/fit_wang2012_model $t_data_file $t_param_file $cm_per_pixel 2> /dev/null | tee wang2012_center_param.txt
+ # mv -f wang2012_dump.qdp ${T_file}
+ cp -fv wang2012_dump.qdp ${T_file}
+ T_file_center="wang2012_dump_center.qdp"
+ mv -fv wang2012_dump.qdp ${T_file_center}
+ mv -fv fit_result.qdp wang2012_fit_center.qdp
+elif [ "$t_profile_type" = "allen" ]; then
+ $base_path/fit_allen_model $t_data_file $cm_per_pixel
+ # mv -f allen_dump.qdp ${T_file}
+ mv -f allen_dump.qdp allen_dump_center.qdp
+elif [ "$t_profile_type" = "zzl" ]; then
+ $base_path/fit_zzl_model $t_data_file $t_param_file
+ # mv -f zzl_dump.qdp ${T_file}
+ mv -f zzl_dump.qdp zzl_dump_center.qdp
+else
+ printf "ERROR: temperature profile name \`${t_profile_type}' invalid!\n"
+ exit 10
+fi
+# temp profile }}}
+
+$base_path/coolfunc_calc_bolo.sh ${T_file_center} $abund $nh $z cfunc_bolo.dat
+$base_path/coolfunc_calc.sh ${T_file_center} $abund $nh $z $cfunc_file
+mv -fv flux_cnt_ratio.txt flux_cnt_ratio_center.txt
+# fit sbp
+$base_path/fit_beta_sbp $sbp_cfg 2> /dev/null
+mv -fv beta_param.txt beta_param_center.txt
+cat beta_param_center.txt
+mv -fv sbp_fit.qdp sbp_fit_center.qdp
+mv -fv rho_fit.qdp rho_fit_center.qdp
+mv -fv rho_fit.dat rho_fit_center.dat
+$base_path/fit_nfw_mass mass_int.dat $z 2> /dev/null
+mv -fv nfw_param.txt nfw_param_center.txt
+mv -fv nfw_fit_result.qdp nfw_fit_center.qdp
+mv -fv nfw_dump.qdp mass_int_center.qdp
+mv -fv overdensity.qdp overdensity_center.qdp
+mv -fv gas_mass_int.qdp gas_mass_int_center.qdp
+
+#exit 233
+
+## cooling time
+$base_path/cooling_time rho_fit_center.dat ${T_file_center} cfunc_bolo.dat $dl $cm_per_pixel > cooling_time.dat
+## 'rcool' is the radius (0.048 * r500) at which to evaluate tcool, not the cooling time itself
+rcool=`$base_path/analyze_mass_profile.py 500 c | grep '^r500' | awk -F "=" '{print .048*$2}'`
+printf "rcool= ${rcool}\n"
+
+## center value {{{
+if [ "${CENTER_VAL}" = "YES" ]; then
+ $base_path/analyze_mass_profile.py 200 c
+ $base_path/analyze_mass_profile.py 500 c
+ $base_path/extract_tcool.py $rcool | tee cooling_time_result.txt
+ exit 0
+fi
+## center value }}}
+
+# clean previous files
+rm -f summary_shuffle_mass_profile.qdp
+rm -f summary_overdensity.qdp
+rm -f summary_mass_profile.qdp
+rm -f summary_gas_mass_profile.qdp
+
+## count
+COUNT=1
+
+# 100 Monte Carlo simulations to determine the errors,
+# repeating the above steps with shuffled input data
+printf "\n+++++++++++++++++++ Monte Carlo +++++++++++++++++++++\n"
+for i in `seq 1 100`; do
+ # echo $t_data_file
+ $base_path/shuffle_T.py $t_data_file temp_shuffled_t.dat
+ $base_path/shuffle_sbp.py $sbp_data_file temp_shuffled_sbp.dat
+ # t_data_file=temp_shuffled_t.dat
+#exit
+
+ if [ "$t_profile_type" = "zyy" ]; then
+ $base_path/fit_zyy_model temp_shuffled_t.dat $t_param_file $cm_per_pixel
+ mv -f zyy_dump.qdp ${T_file}
+ elif [ "$t_profile_type" = "m0603246" ]; then
+ $base_path/fit_m0603246 temp_shuffled_t.dat $cm_per_pixel
+ mv -f m0603246_dump.qdp ${T_file}
+ elif [ "$t_profile_type" = "wang2012" ]; then
+ $base_path/fit_wang2012_model temp_shuffled_t.dat $t_param_file $cm_per_pixel 2> /dev/null
+ mv -f wang2012_dump.qdp ${T_file}
+ elif [ "$t_profile_type" = "allen" ]; then
+ $base_path/fit_allen_model temp_shuffled_t.dat $cm_per_pixel
+ mv -f allen_dump.qdp ${T_file}
+ elif [ "$t_profile_type" = "zzl" ]; then
+ $base_path/fit_zzl_model temp_shuffled_t.dat $t_param_file
+ mv -f zzl_dump.qdp ${T_file}
+ else
+ printf "ERROR: temperature profile name \`${t_profile_type}' invalid!\n"
+ exit 10
+ fi
+
+#exit
+ : > temp_sbp.cfg
+
+ cat $sbp_cfg | while read l; do
+ if echo $l | grep -q 'sbp_file'; then
+ echo "sbp_file temp_shuffled_sbp.dat" >> temp_sbp.cfg
+ elif echo $l | grep -q 'T_file'; then
+ echo "T_file ${T_file}" >> temp_sbp.cfg
+ else
+ echo $l >> temp_sbp.cfg
+ fi
+ done
+
+ ## count
+ printf "## ${COUNT} ##\n"
+ COUNT=`expr ${COUNT} + 1`
+ printf "## `pwd` ##\n"
+ $base_path/coolfunc_calc.sh ${T_file} ${abund} ${nh} ${z} ${cfunc_file}
+
+ # single-beta model
+ $base_path/fit_beta_sbp temp_sbp.cfg 2> /dev/null
+ [ -r "beta_param.txt" ] && cat beta_param.txt
+ $base_path/fit_nfw_mass mass_int.dat $z 2> /dev/null
+ cat nfw_dump.qdp >> summary_mass_profile.qdp
+ echo "no no no" >> summary_mass_profile.qdp
+
+ cat overdensity.qdp >> summary_overdensity.qdp
+ echo "no no no" >> summary_overdensity.qdp
+
+ cat gas_mass_int.qdp >> summary_gas_mass_profile.qdp
+ echo "no no no" >> summary_gas_mass_profile.qdp
+
+done # end `for'
+printf "\n+++++++++++++++++++ Monte Carlo +++++++++++++++++++++\n"
+
+# analyze the errors
+printf "\n+++++++++++++++ RESULTS (single-beta) +++++++++++++++\n"
+RESULT="results_mrl.txt"
+[ -e "${RESULT}" ] && mv -fv ${RESULT} ${RESULT}_bak
+
+$base_path/analyze_mass_profile.py 200 | tee -a ${RESULT}
+$base_path/analyze_mass_profile.py 500 | tee -a ${RESULT}
+$base_path/analyze_mass_profile.py 1500 | tee -a ${RESULT}
+$base_path/analyze_mass_profile.py 2500 | tee -a ${RESULT}
+
+R200_VAL=`grep '^r200' ${RESULT} | awk '{ print $2 }'`
+R500_VAL=`grep '^r500' ${RESULT} | awk '{ print $2 }'`
+printf "\n## R200: ${R200_VAL}, R500: ${R500_VAL}\n"
+L200=`$base_path/calc_lx $radius_sbp_file flux_cnt_ratio_center.txt $z ${R200_VAL} $t_data_file | grep '^Lx' | awk '{ print $2,$3,$4 }'`
+L500=`$base_path/calc_lx $radius_sbp_file flux_cnt_ratio_center.txt $z ${R500_VAL} $t_data_file | grep '^Lx' | awk '{ print $2,$3,$4 }'`
+printf "L200= ${L200} erg/s\n" | tee -a ${RESULT}
+printf "L500= ${L500} erg/s\n" | tee -a ${RESULT}
+$base_path/extract_tcool.py $rcool | tee cooling_time_result.txt
+printf "\n+++++++++++++++++++++++++++++++++++++++++++++++++++++\n"
+
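Usage sketch for the script above, following its own "Usage: <cfg_file> [c]"
convention; 'fitting_mass.conf' is the single-beta configuration named in the
HOWTO, and the log file name is illustrative:

# center (best-fit) values only:
$ ./fit_beta_nfw_mass_profile.sh fitting_mass.conf c
# full run: 100 Monte Carlo realizations, results written to 'results_mrl.txt':
$ ./fit_beta_nfw_mass_profile.sh fitting_mass.conf 2>&1 | tee fit_mass.log

The second form appends every realization to the summary_*.qdp files before
analyze_mass_profile.py derives the error ranges at the 200/500/1500/2500
overdensities.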
diff --git a/mod_ly/fit_beta_nfw_mass_profile2.sh b/mod_ly/fit_beta_nfw_mass_profile2.sh
new file mode 100755
index 0000000..87f2b12
--- /dev/null
+++ b/mod_ly/fit_beta_nfw_mass_profile2.sh
@@ -0,0 +1,235 @@
+#!/bin/sh
+
+# modified by LIweitaNux, 2012/09/06
+export PATH="/usr/local/bin:/usr/bin:/bin:$PATH"
+
+if [ $# -eq 1 ]; then
+ cfg_file="$1"
+elif [ $# -eq 2 ]; then
+ cfg_file="$1"
+ CENTER_VAL="YES"
+else
+ echo "Usage: $0 <cfg_file> [c]"
+ exit 1
+fi
+
+if ! which xspec > /dev/null; then
+ printf "ERROR: please initialize HEASOFT first\n"
+ exit 2
+fi
+
+if [ -z "${HEADAS}" ]; then
+ printf "ERROR: variable \`HEADAS' not properly set\n"
+ exit 3
+fi
+
+export PGPLOT_FONT="${HEADAS}/lib/grfont.dat"
+printf "## PGPLOT_FONT: \`${PGPLOT_FONT}'\n"
+
+if [ "$0" = `basename $0` ]; then
+ script_path=`which $0`
+ base_path=`dirname ${script_path}`
+else
+ base_path=`dirname $0`
+fi
+printf "## base_path: \`${base_path}'\n"
+printf "## use configuration file: \`${cfg_file}'\n"
+
+#initialize profile type name
+t_profile_type=`grep '^t_profile' $cfg_file | awk '{print $2}'`
+printf "## t_profile_type: \`$t_profile_type'\n"
+#initialize data file name
+t_data_file=`grep '^t_data_file' $cfg_file | awk '{print $2}'`
+t_param_file=`grep '^t_param_file' $cfg_file | awk '{print $2}'`
+#initialize sbp config file
+sbp_cfg=`grep '^sbp_cfg' $cfg_file | awk '{print $2}'`
+#initialize the temperature profile file
+T_file=`grep '^T_file' $sbp_cfg | awk '{print $2}'`
+cfunc_file=`grep '^cfunc_file' ${sbp_cfg} |awk '{print $2}'`
+abund=`grep '^abund' ${cfg_file} |awk '{print $2}'`
+nh=`grep '^nh' ${cfg_file} |awk '{print $2}'`
+## calc `cm_per_pixel' instead {{{
+# cm_per_pixel=`grep '^cm_per_pixel' $sbp_cfg|awk '{print $2}'`
+z=`grep '^z' $sbp_cfg | awk '{ print $2 }'`
+cm_per_pixel=`${base_path}/calc_distance ${z} | grep 'cm_per_pixel' | awk '{ print $2 }'`
+sed -i'' "s/^cm_per_pixel.*$/cm_per_pixel ${cm_per_pixel}/" ${sbp_cfg}
+printf "## redshift: ${z}, cm_per_pixel: ${cm_per_pixel}\n"
+## cm_per_pixel }}}
+da=`python -c "print($cm_per_pixel/(.492/3600/180*3.1415926))"`
+dl=`python -c "print($da*(1+$z)**2)"`
+printf "da= ${da}\n"
+printf "dl= ${dl}\n"
+## sbp {{{
+sbp_data_file=`grep '^sbp_file' $sbp_cfg | awk '{print $2}'`
+radius_sbp_file=`grep '^radius_sbp_file' ${cfg_file} | awk '{print $2}'`
+
+if [ "x$radius_sbp_file" = "x" ]; then
+ echo "ERROR: radius_sbp_file must be assigned; it should be a 4-column file containing radius, radius err, sbp, and sbp err"
+ exit 200
+fi
+
+TMP_RSBP="_tmp_rsbp.txt"
+[ -e "${TMP_RSBP}" ] && rm -f ${TMP_RSBP}
+cat ${radius_sbp_file} | sed 's/#.*$//' | grep -Ev '^\s*$' > ${TMP_RSBP}
+# mv -f _tmp_rsbp.txt ${radius_sbp_file}
+radius_sbp_file="${TMP_RSBP}"
+## sbp }}}
+
+# determine which temperature profile to be used, and fit the T profile {{{
+if [ "$t_profile_type" = "zyy" ]; then
+ $base_path/fit_zyy_model $t_data_file $t_param_file $cm_per_pixel
+ # mv -f zyy_dump.qdp ${T_file}
+ mv -f zyy_dump.qdp zyy_dump_center.qdp
+elif [ "$t_profile_type" = "m0603246" ]; then
+ $base_path/fit_m0603246 $t_data_file $cm_per_pixel
+ # mv -f m0603246_dump.qdp ${T_file}
+ mv -f m0603246_dump.qdp m0603246_dump_center.qdp
+elif [ "$t_profile_type" = "wang2012" ]; then
+ $base_path/fit_wang2012_model $t_data_file $t_param_file $cm_per_pixel 2> /dev/null | tee wang2012_center_param.txt
+ # mv -f wang2012_dump.qdp ${T_file}
+ cp -fv wang2012_dump.qdp ${T_file}
+ T_file_center="wang2012_dump_center.qdp"
+ mv -fv wang2012_dump.qdp ${T_file_center}
+ mv -fv fit_result.qdp wang2012_fit_center.qdp
+elif [ "$t_profile_type" = "allen" ]; then
+ $base_path/fit_allen_model $t_data_file $cm_per_pixel
+ # mv -f allen_dump.qdp ${T_file}
+ mv -f allen_dump.qdp allen_dump_center.qdp
+elif [ "$t_profile_type" = "zzl" ]; then
+ $base_path/fit_zzl_model $t_data_file $t_param_file
+ # mv -f zzl_dump.qdp ${T_file}
+ mv -f zzl_dump.qdp zzl_dump_center.qdp
+else
+ printf "ERROR: temperature profile name \`${t_profile_type}' invalid!\n"
+ exit 10
+fi
+# temp profile }}}
+
+$base_path/coolfunc_calc_bolo.sh ${T_file_center} $abund $nh $z cfunc_bolo.dat
+$base_path/coolfunc_calc.sh ${T_file_center} $abund $nh $z $cfunc_file
+mv -fv flux_cnt_ratio.txt flux_cnt_ratio_center.txt
+# fit sbp
+$base_path/fit_beta_sbp2 $sbp_cfg 2> /dev/null
+mv -fv beta_param.txt beta_param_center.txt
+cat beta_param_center.txt
+mv -fv sbp_fit.qdp sbp_fit_center.qdp
+mv -fv rho_fit.qdp rho_fit_center.qdp
+mv -fv rho_fit.dat rho_fit_center.dat
+$base_path/fit_nfw_mass mass_int.dat $z 2> /dev/null
+mv -fv nfw_param.txt nfw_param_center.txt
+mv -fv nfw_fit_result.qdp nfw_fit_center.qdp
+mv -fv nfw_dump.qdp mass_int_center.qdp
+mv -fv overdensity.qdp overdensity_center.qdp
+mv -fv gas_mass_int.qdp gas_mass_int_center.qdp
+
+#exit 233
+
+## cooling time
+$base_path/cooling_time rho_fit_center.dat ${T_file_center} cfunc_bolo.dat $dl $cm_per_pixel > cooling_time.dat
+## 'rcool' is the radius (0.048 * r500) at which to evaluate tcool, not the cooling time itself
+rcool=`$base_path/analyze_mass_profile.py 500 c | grep '^r500' | awk -F "=" '{print .048*$2}'`
+printf "rcool= ${rcool}\n"
+
+## center value {{{
+if [ "${CENTER_VAL}" = "YES" ]; then
+ $base_path/analyze_mass_profile.py 200 c
+ $base_path/analyze_mass_profile.py 500 c
+ $base_path/extract_tcool.py $rcool | tee cooling_time_result.txt
+ exit 0
+fi
+## center value }}}
+
+# clean previous files
+rm -f summary_shuffle_mass_profile.qdp
+rm -f summary_overdensity.qdp
+rm -f summary_mass_profile.qdp
+rm -f summary_gas_mass_profile.qdp
+
+## count
+COUNT=1
+
+# 100 Monte Carlo simulations to determine the errors,
+# repeating the above steps with shuffled input data
+printf "\n+++++++++++++++++++ Monte Carlo +++++++++++++++++++++\n"
+for i in `seq 1 100`; do
+ # echo $t_data_file
+ $base_path/shuffle_T.py $t_data_file temp_shuffled_t.dat
+ $base_path/shuffle_sbp.py $sbp_data_file temp_shuffled_sbp.dat
+ # t_data_file=temp_shuffled_t.dat
+#exit
+
+ if [ "$t_profile_type" = "zyy" ]; then
+ $base_path/fit_zyy_model temp_shuffled_t.dat $t_param_file $cm_per_pixel
+ mv -f zyy_dump.qdp ${T_file}
+ elif [ "$t_profile_type" = "m0603246" ]; then
+ $base_path/fit_m0603246 temp_shuffled_t.dat $cm_per_pixel
+ mv -f m0603246_dump.qdp ${T_file}
+ elif [ "$t_profile_type" = "wang2012" ]; then
+ $base_path/fit_wang2012_model temp_shuffled_t.dat $t_param_file $cm_per_pixel 2> /dev/null
+ mv -f wang2012_dump.qdp ${T_file}
+ elif [ "$t_profile_type" = "allen" ]; then
+ $base_path/fit_allen_model temp_shuffled_t.dat $cm_per_pixel
+ mv -f allen_dump.qdp ${T_file}
+ elif [ "$t_profile_type" = "zzl" ]; then
+ $base_path/fit_zzl_model temp_shuffled_t.dat $t_param_file
+ mv -f zzl_dump.qdp ${T_file}
+ else
+ printf "ERROR: temperature profile name \`${t_profile_type}' invalid!\n"
+ exit 10
+ fi
+
+#exit
+ : > temp_sbp.cfg
+
+ cat $sbp_cfg | while read l; do
+ if echo $l | grep -q 'sbp_file'; then
+ echo "sbp_file temp_shuffled_sbp.dat" >> temp_sbp.cfg
+ elif echo $l | grep -q 'T_file'; then
+ echo "T_file ${T_file}" >> temp_sbp.cfg
+ else
+ echo $l >> temp_sbp.cfg
+ fi
+ done
+
+ ## count
+ printf "## ${COUNT} ##\n"
+ COUNT=`expr ${COUNT} + 1`
+ printf "## `pwd` ##\n"
+ $base_path/coolfunc_calc.sh ${T_file} ${abund} ${nh} ${z} ${cfunc_file}
+
+ # single-beta model
+ $base_path/fit_beta_sbp2 temp_sbp.cfg 2> /dev/null
+ [ -r "beta_param.txt" ] && cat beta_param.txt
+ $base_path/fit_nfw_mass mass_int.dat $z 2> /dev/null
+ cat nfw_dump.qdp >> summary_mass_profile.qdp
+ echo "no no no" >> summary_mass_profile.qdp
+
+ cat overdensity.qdp >> summary_overdensity.qdp
+ echo "no no no" >> summary_overdensity.qdp
+
+ cat gas_mass_int.qdp >> summary_gas_mass_profile.qdp
+ echo "no no no" >> summary_gas_mass_profile.qdp
+
+done # end `for'
+printf "\n+++++++++++++++++++ Monte Carlo +++++++++++++++++++++\n"
+
+# analyze the errors
+printf "\n+++++++++++++++ RESULTS (single-beta) +++++++++++++++\n"
+RESULT="results_mrl.txt"
+[ -e "${RESULT}" ] && mv -fv ${RESULT} ${RESULT}_bak
+
+$base_path/analyze_mass_profile.py 200 | tee -a ${RESULT}
+$base_path/analyze_mass_profile.py 500 | tee -a ${RESULT}
+$base_path/analyze_mass_profile.py 1500 | tee -a ${RESULT}
+$base_path/analyze_mass_profile.py 2500 | tee -a ${RESULT}
+
+R200_VAL=`grep '^r200' ${RESULT} | awk '{ print $2 }'`
+R500_VAL=`grep '^r500' ${RESULT} | awk '{ print $2 }'`
+printf "\n## R200: ${R200_VAL}, R500: ${R500_VAL}\n"
+L200=`$base_path/calc_lx $radius_sbp_file flux_cnt_ratio_center.txt $z ${R200_VAL} $t_data_file | grep '^Lx' | awk '{ print $2,$3,$4 }'`
+L500=`$base_path/calc_lx $radius_sbp_file flux_cnt_ratio_center.txt $z ${R500_VAL} $t_data_file | grep '^Lx' | awk '{ print $2,$3,$4 }'`
+printf "L200= ${L200} erg/s\n" | tee -a ${RESULT}
+printf "L500= ${L500} erg/s\n" | tee -a ${RESULT}
+$base_path/extract_tcool.py $rcool | tee cooling_time_result.txt
+printf "\n+++++++++++++++++++++++++++++++++++++++++++++++++++++\n"
+
diff --git a/mod_ly/fit_beta_nfw_mass_profile_simple.sh b/mod_ly/fit_beta_nfw_mass_profile_simple.sh
new file mode 100755
index 0000000..dc4fd3c
--- /dev/null
+++ b/mod_ly/fit_beta_nfw_mass_profile_simple.sh
@@ -0,0 +1,135 @@
+#!/bin/sh
+
+# modified by LIweitaNux, 2012/09/06
+export PATH="/usr/local/bin:/usr/bin:/bin:$PATH"
+
+if [ $# -eq 1 ]; then
+ cfg_file="$1"
+elif [ $# -eq 2 ]; then
+ cfg_file="$1"
+ CENTER_VAL="YES"
+else
+ echo "Usage: $0 <cfg_file> [c]"
+ exit 1
+fi
+
+if ! which xspec > /dev/null; then
+ printf "ERROR: please initialize HEASOFT first\n"
+ exit 2
+fi
+
+if [ -z "${HEADAS}" ]; then
+ printf "ERROR: variable \`HEADAS' not properly set\n"
+ exit 3
+fi
+
+export PGPLOT_FONT="${HEADAS}/lib/grfont.dat"
+printf "## PGPLOT_FONT: \`${PGPLOT_FONT}'\n"
+
+if [ "$0" = `basename $0` ]; then
+ script_path=`which $0`
+ base_path=`dirname ${script_path}`
+else
+ base_path=`dirname $0`
+fi
+printf "## base_path: \`${base_path}'\n"
+printf "## use configuration file: \`${cfg_file}'\n"
+
+#initialize profile type name
+t_profile_type=`grep '^t_profile' $cfg_file | awk '{print $2}'`
+printf "## t_profile_type: \`$t_profile_type'\n"
+#initialize data file name
+t_data_file=`grep '^t_data_file' $cfg_file | awk '{print $2}'`
+t_param_file=`grep '^t_param_file' $cfg_file | awk '{print $2}'`
+#initialize sbp config file
+sbp_cfg=`grep '^sbp_cfg' $cfg_file | awk '{print $2}'`
+#initialize the temperature profile file
+T_file=`grep '^T_file' $sbp_cfg | awk '{print $2}'`
+cfunc_file=`grep '^cfunc_file' ${sbp_cfg} |awk '{print $2}'`
+abund=`grep '^abund' ${cfg_file} |awk '{print $2}'`
+nh=`grep '^nh' ${cfg_file} |awk '{print $2}'`
+## calc `cm_per_pixel' instead {{{
+# cm_per_pixel=`grep '^cm_per_pixel' $sbp_cfg|awk '{print $2}'`
+z=`grep '^z' $sbp_cfg | awk '{ print $2 }'`
+cm_per_pixel=`${base_path}/calc_distance ${z} | grep 'cm_per_pixel' | awk '{ print $2 }'`
+sed -i'' "s/^cm_per_pixel.*$/cm_per_pixel ${cm_per_pixel}/" ${sbp_cfg}
+printf "## redshift: ${z}, cm_per_pixel: ${cm_per_pixel}\n"
+## cm_per_pixel }}}
+da=`python -c "print($cm_per_pixel/(.492/3600/180*3.1415926))"`
+dl=`python -c "print($da*(1+$z)**2)"`
+## sbp {{{
+sbp_data_file=`grep '^sbp_file' $sbp_cfg | awk '{print $2}'`
+radius_sbp_file=`grep '^radius_sbp_file' ${cfg_file} | awk '{print $2}'`
+
+if [ "x$radius_sbp_file" = "x" ] || [ ! -r "${radius_sbp_file}" ]; then
+ echo "ERROR: radius_sbp_file must be assigned and readable; it should be a 4-column file containing radius, radius err, sbp, and sbp err"
+ exit 200
+fi
+
+TMP_RSBP="_tmp_rsbp.txt"
+[ -e "${TMP_RSBP}" ] && rm -f ${TMP_RSBP}
+cat ${radius_sbp_file} | sed 's/#.*$//' | grep -Ev '^\s*$' > ${TMP_RSBP}
+# mv -f _tmp_rsbp.txt ${radius_sbp_file}
+radius_sbp_file="${TMP_RSBP}"
+## sbp }}}
+
+# determine which temperature profile to be used, and fit the T profile {{{
+if [ "$t_profile_type" = "zyy" ]; then
+ $base_path/fit_zyy_model $t_data_file $t_param_file $cm_per_pixel
+ # mv -f zyy_dump.qdp ${T_file}
+ mv -f zyy_dump.qdp zyy_dump_center.qdp
+elif [ "$t_profile_type" = "m0603246" ]; then
+ $base_path/fit_m0603246 $t_data_file $cm_per_pixel
+ # mv -f m0603246_dump.qdp ${T_file}
+ mv -f m0603246_dump.qdp m0603246_dump_center.qdp
+elif [ "$t_profile_type" = "wang2012" ]; then
+ $base_path/fit_wang2012_model $t_data_file $t_param_file $cm_per_pixel 2> /dev/null | tee wang2012_center_param.txt
+ # mv -f wang2012_dump.qdp ${T_file}
+ cp -fv wang2012_dump.qdp ${T_file}
+ T_file_center="wang2012_dump_center.qdp"
+ mv -fv wang2012_dump.qdp ${T_file_center}
+ mv -fv fit_result.qdp wang2012_fit_center.qdp
+elif [ "$t_profile_type" = "allen" ]; then
+ $base_path/fit_allen_model $t_data_file $cm_per_pixel
+ # mv -f allen_dump.qdp ${T_file}
+ mv -f allen_dump.qdp allen_dump_center.qdp
+elif [ "$t_profile_type" = "zzl" ]; then
+ $base_path/fit_zzl_model $t_data_file $t_param_file
+ # mv -f zzl_dump.qdp ${T_file}
+ mv -f zzl_dump.qdp zzl_dump_center.qdp
+else
+ printf "ERROR: temperature profile name \`${t_profile_type}' invalid!\n"
+ exit 10
+fi
+# temp profile }}}
+
+$base_path/coolfunc_calc_bolo.sh ${T_file_center} $abund $nh $z cfunc_bolo.dat
+$base_path/coolfunc_calc.sh ${T_file_center} $abund $nh $z $cfunc_file
+mv -fv flux_cnt_ratio.txt flux_cnt_ratio_center.txt
+# fit sbp
+$base_path/fit_beta_sbp $sbp_cfg 2> /dev/null
+mv -fv beta_param.txt beta_param_center.txt
+mv -fv sbp_fit.qdp sbp_fit_center.qdp
+mv -fv rho_fit.qdp rho_fit_center.qdp
+$base_path/fit_nfw_mass mass_int.dat $z 2> /dev/null
+mv -fv nfw_param.txt nfw_param_center.txt
+mv -fv nfw_fit_result.qdp nfw_fit_center.qdp
+mv -fv nfw_dump.qdp mass_int_center.qdp
+mv -fv overdensity.qdp overdensity_center.qdp
+mv -fv gas_mass_int.qdp gas_mass_int_center.qdp
+
+## cooling time
+$base_path/cooling_time rho_fit_center.dat ${T_file_center} cfunc_bolo.dat $dl $cm_per_pixel > cooling_time.dat
+## 'rcool' is the radius (0.048 * r500) at which to evaluate tcool, not the cooling time itself
+rcool=`$base_path/analyze_mass_profile.py 500 c | grep '^r500' | awk -F "=" '{print .048*$2}'`
+
+## center value {{{
+if [ "${CENTER_VAL}" = "YES" ]; then
+ $base_path/analyze_mass_profile.py 200 c
+ $base_path/analyze_mass_profile.py 500 c
+ exit 0
+fi
+## center value }}}
+
+exit 0
+
diff --git a/mod_ly/fit_dbeta_nfw_mass_profile.sh b/mod_ly/fit_dbeta_nfw_mass_profile.sh
new file mode 100755
index 0000000..be0404f
--- /dev/null
+++ b/mod_ly/fit_dbeta_nfw_mass_profile.sh
@@ -0,0 +1,235 @@
+#!/bin/sh
+
+# modified by LIweitaNux, 2012/09/06
+export PATH="/usr/local/bin:/usr/bin:/bin:$PATH"
+
+if [ $# -eq 1 ]; then
+ cfg_file="$1"
+elif [ $# -eq 2 ]; then
+ cfg_file="$1"
+ CENTER_VAL="YES"
+else
+ echo "Usage: $0 <cfg_file> [c]"
+ exit 1
+fi
+
+if ! which xspec > /dev/null; then
+ printf "ERROR: please initialize HEASOFT first\n"
+ exit 2
+fi
+
+if [ -z "${HEADAS}" ]; then
+ printf "ERROR: variable \`HEADAS' not properly set\n"
+ exit 3
+fi
+
+export PGPLOT_FONT="${HEADAS}/lib/grfont.dat"
+printf "## PGPLOT_FONT: \`${PGPLOT_FONT}'\n"
+
+if [ "$0" = `basename $0` ]; then
+ script_path=`which $0`
+ base_path=`dirname ${script_path}`
+else
+ base_path=`dirname $0`
+fi
+printf "## base_path: \`${base_path}'\n"
+printf "## use configuration file: \`${cfg_file}'\n"
+
+#initialize profile type name
+t_profile_type=`grep '^t_profile' $cfg_file | awk '{print $2}'`
+printf "## t_profile_type: \`$t_profile_type'\n"
+#initialize data file name
+t_data_file=`grep '^t_data_file' $cfg_file | awk '{print $2}'`
+t_param_file=`grep '^t_param_file' $cfg_file | awk '{print $2}'`
+#initialize sbp config file
+sbp_cfg=`grep '^sbp_cfg' $cfg_file | awk '{print $2}'`
+#initialize the temperature profile file
+T_file=`grep '^T_file' $sbp_cfg | awk '{print $2}'`
+cfunc_file=`grep '^cfunc_file' ${sbp_cfg} |awk '{print $2}'`
+abund=`grep '^abund' ${cfg_file} |awk '{print $2}'`
+nh=`grep '^nh' ${cfg_file} |awk '{print $2}'`
+## calc `cm_per_pixel' instead {{{
+# cm_per_pixel=`grep '^cm_per_pixel' $sbp_cfg|awk '{print $2}'`
+z=`grep '^z' $sbp_cfg | awk '{ print $2 }'`
+cm_per_pixel=`${base_path}/calc_distance ${z} | grep 'cm_per_pixel' | awk '{ print $2 }'`
+sed -i'' "s/^cm_per_pixel.*$/cm_per_pixel ${cm_per_pixel}/" ${sbp_cfg}
+printf "## redshift: ${z}, cm_per_pixel: ${cm_per_pixel}\n"
+## cm_per_pixel }}}
+da=`python -c "print($cm_per_pixel/(.492/3600/180*3.1415926))"`
+dl=`python -c "print($da*(1+$z)**2)"`
+printf "da= ${da}\n"
+printf "dl= ${dl}\n"
+## sbp {{{
+sbp_data_file=`grep '^sbp_file' $sbp_cfg | awk '{print $2}'`
+radius_sbp_file=`grep '^radius_sbp_file' ${cfg_file} | awk '{print $2}'`
+
+if [ "x$radius_sbp_file" = "x" ]; then
+ echo "ERROR: radius_sbp_file must be assigned; it should be a 4-column file containing radius, radius err, sbp, and sbp err"
+ exit 200
+fi
+
+TMP_RSBP="_tmp_rsbp.txt"
+[ -e "${TMP_RSBP}" ] && rm -f ${TMP_RSBP}
+cat ${radius_sbp_file} | sed 's/#.*$//' | grep -Ev '^\s*$' > ${TMP_RSBP}
+# mv -f _tmp_rsbp.txt ${radius_sbp_file}
+radius_sbp_file="${TMP_RSBP}"
+## sbp }}}
+
+# determine which temperature profile to be used, and fit the T profile {{{
+if [ "$t_profile_type" = "zyy" ]; then
+ $base_path/fit_zyy_model $t_data_file $t_param_file $cm_per_pixel
+ # mv -f zyy_dump.qdp ${T_file}
+ mv -f zyy_dump.qdp zyy_dump_center.qdp
+elif [ "$t_profile_type" = "m0603246" ]; then
+ $base_path/fit_m0603246 $t_data_file $cm_per_pixel
+ # mv -f m0603246_dump.qdp ${T_file}
+ mv -f m0603246_dump.qdp m0603246_dump_center.qdp
+elif [ "$t_profile_type" = "wang2012" ]; then
+ $base_path/fit_wang2012_model $t_data_file $t_param_file $cm_per_pixel 2> /dev/null | tee wang2012_center_param.txt
+ # mv -f wang2012_dump.qdp ${T_file}
+ cp -fv wang2012_dump.qdp ${T_file}
+ T_file_center="wang2012_dump_center.qdp"
+ mv -fv wang2012_dump.qdp ${T_file_center}
+ mv -fv fit_result.qdp wang2012_fit_center.qdp
+elif [ "$t_profile_type" = "allen" ]; then
+ $base_path/fit_allen_model $t_data_file $cm_per_pixel
+ # mv -f allen_dump.qdp ${T_file}
+ mv -f allen_dump.qdp allen_dump_center.qdp
+elif [ "$t_profile_type" = "zzl" ]; then
+ $base_path/fit_zzl_model $t_data_file $t_param_file
+ # mv -f zzl_dump.qdp ${T_file}
+ mv -f zzl_dump.qdp zzl_dump_center.qdp
+else
+ printf "ERROR: temperature profile name \`${t_profile_type}' invalid!\n"
+ exit 10
+fi
+# temp profile }}}
+
+$base_path/coolfunc_calc_bolo.sh ${T_file_center} $abund $nh $z cfunc_bolo.dat
+$base_path/coolfunc_calc.sh ${T_file_center} $abund $nh $z $cfunc_file
+mv -fv flux_cnt_ratio.txt flux_cnt_ratio_center.txt
+# fit sbp (double-beta)
+$base_path/fit_dbeta_sbp $sbp_cfg 2> /dev/null
+mv -fv dbeta_param.txt dbeta_param_center.txt
+cat dbeta_param_center.txt
+mv -fv sbp_fit.qdp sbp_fit_center.qdp
+mv -fv rho_fit.qdp rho_fit_center.qdp
+mv -fv rho_fit.dat rho_fit_center.dat
+$base_path/fit_nfw_mass mass_int.dat $z 2> /dev/null
+mv -fv nfw_param.txt nfw_param_center.txt
+mv -fv nfw_fit_result.qdp nfw_fit_center.qdp
+mv -fv nfw_dump.qdp mass_int_center.qdp
+mv -fv overdensity.qdp overdensity_center.qdp
+mv -fv gas_mass_int.qdp gas_mass_int_center.qdp
+
+#exit 233
+
+## cooling time
+$base_path/cooling_time rho_fit_center.dat ${T_file_center} cfunc_bolo.dat $dl $cm_per_pixel > cooling_time.dat
+## 'rcool' is the radius (0.048 * r500) at which to evaluate tcool, not the cooling time itself
+rcool=`$base_path/analyze_mass_profile.py 500 c | grep '^r500' | awk -F "=" '{print .048*$2}'`
+printf "rcool= ${rcool}\n"
+
+## center value {{{
+if [ "${CENTER_VAL}" = "YES" ]; then
+ $base_path/analyze_mass_profile.py 200 c
+ $base_path/analyze_mass_profile.py 500 c
+ $base_path/extract_tcool.py $rcool | tee cooling_time_result.txt
+ exit 0
+fi
+## center value }}}
+
+# clean previous files
+rm -f summary_shuffle_mass_profile.qdp
+rm -f summary_overdensity.qdp
+rm -f summary_mass_profile.qdp
+rm -f summary_gas_mass_profile.qdp
+
+## count
+COUNT=1
+
+# 100 Monte Carlo simulations to estimate the errors
+# (repeat the steps above with resampled data)
+printf "\n+++++++++++++++++++ Monte Carlo +++++++++++++++++++++\n"
+for i in `seq 1 100`; do
+ # echo $t_data_file
+ $base_path/shuffle_T.py $t_data_file temp_shuffled_t.dat
+ $base_path/shuffle_sbp.py $sbp_data_file temp_shuffled_sbp.dat
+ # t_data_file=temp_shuffled_t.dat
+#exit
+
+ if [ "$t_profile_type" = "zyy" ]; then
+ $base_path/fit_zyy_model temp_shuffled_t.dat $t_param_file $cm_per_pixel
+ mv -f zyy_dump.qdp ${T_file}
+ elif [ "$t_profile_type" = "m0603246" ]; then
+ $base_path/fit_m0603246 temp_shuffled_t.dat $cm_per_pixel
+ mv -f m0603246_dump.qdp ${T_file}
+ elif [ "$t_profile_type" = "wang2012" ]; then
+ $base_path/fit_wang2012_model temp_shuffled_t.dat $t_param_file $cm_per_pixel 2> /dev/null
+ mv -f wang2012_dump.qdp ${T_file}
+ elif [ "$t_profile_type" = "allen" ]; then
+ $base_path/fit_allen_model temp_shuffled_t.dat $cm_per_pixel
+ mv -f allen_dump.qdp ${T_file}
+ elif [ "$t_profile_type" = "zzl" ]; then
+ $base_path/fit_zzl_model temp_shuffled_t.dat $t_param_file
+ mv -f zzl_dump.qdp ${T_file}
+ else
+ printf "ERROR: temperature profile name \`${t_profile_type}' invalid!\n"
+ exit 10
+ fi
+
+#exit
+ : > temp_sbp.cfg
+
+ cat $sbp_cfg | while read l; do
+ if echo $l | grep -q 'sbp_file'; then
+ echo "sbp_file temp_shuffled_sbp.dat" >> temp_sbp.cfg
+ elif echo $l | grep -q 'T_file'; then
+ echo "T_file ${T_file}" >> temp_sbp.cfg
+ else
+ echo $l >> temp_sbp.cfg
+ fi
+ done
+
+ ## count
+ printf "## ${COUNT} ##\n"
+ COUNT=`expr ${COUNT} + 1`
+ printf "## `pwd` ##\n"
+ $base_path/coolfunc_calc.sh ${T_file} ${abund} ${nh} ${z} ${cfunc_file}
+
+ # double-beta model
+ $base_path/fit_dbeta_sbp temp_sbp.cfg 2> /dev/null
+ [ -r "dbeta_param.txt" ] && cat dbeta_param.txt
+ $base_path/fit_nfw_mass mass_int.dat $z 2> /dev/null
+ cat nfw_dump.qdp >> summary_mass_profile.qdp
+ echo "no no no" >> summary_mass_profile.qdp
+
+ cat overdensity.qdp >> summary_overdensity.qdp
+ echo "no no no" >> summary_overdensity.qdp
+
+ cat gas_mass_int.qdp >> summary_gas_mass_profile.qdp
+ echo "no no no" >> summary_gas_mass_profile.qdp
+
+done  # end of the \`for' loop
+printf "\n+++++++++++++++++++ Monte Carlo +++++++++++++++++++++\n"
+
+# analyze the errors
+printf "\n+++++++++++++++ RESULTS (double-beta) +++++++++++++++\n"
+RESULT="results_dbeta_mrl.txt"
+[ -e "${RESULT}" ] && mv -fv ${RESULT} ${RESULT}_bak
+
+$base_path/analyze_mass_profile.py 200 | tee -a ${RESULT}
+$base_path/analyze_mass_profile.py 500 | tee -a ${RESULT}
+$base_path/analyze_mass_profile.py 1500 | tee -a ${RESULT}
+$base_path/analyze_mass_profile.py 2500 | tee -a ${RESULT}
+
+R200_VAL=`grep '^r200' ${RESULT} | awk '{ print $2 }'`
+R500_VAL=`grep '^r500' ${RESULT} | awk '{ print $2 }'`
+printf "\n## R200: ${R200_VAL}, R500: ${R500_VAL}\n"
+L200=`$base_path/calc_lx $radius_sbp_file flux_cnt_ratio_center.txt $z ${R200_VAL} $t_data_file | grep '^Lx' | awk '{ print $2,$3,$4 }'`
+L500=`$base_path/calc_lx $radius_sbp_file flux_cnt_ratio_center.txt $z ${R500_VAL} $t_data_file | grep '^Lx' | awk '{ print $2,$3,$4 }'`
+printf "L200= ${L200} erg/s\n" | tee -a ${RESULT}
+printf "L500= ${L500} erg/s\n" | tee -a ${RESULT}
+$base_path/extract_tcool.py $rcool | tee cooling_time_result.txt
+printf "\n+++++++++++++++++++++++++++++++++++++++++++++++++++++\n"
+
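For reference, a minimal sketch (Python, not part of this commit, with hypothetical input values) of the pixel-to-distance conversion used above: one ACIS pixel subtends 0.492 arcsec, so the angular diameter distance follows from the `cm_per_pixel' value reported by `calc_distance', and the luminosity distance is dl = da*(1+z)^2.

    import math

    # Hypothetical inputs; in the script they come from `calc_distance' and the sbp config.
    cm_per_pixel = 7.0e21   # cm per ACIS pixel at the cluster redshift (assumed value)
    z = 0.12                # redshift (assumed value)

    pixel_rad = 0.492 / 3600.0 / 180.0 * math.pi   # 0.492 arcsec in radians
    d_a = cm_per_pixel / pixel_rad                 # angular diameter distance [cm]
    d_l = d_a * (1.0 + z) ** 2                     # luminosity distance [cm]
    print("da= %g cm, dl= %g cm" % (d_a, d_l))
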
diff --git a/mod_ly/fit_dbeta_nfw_mass_profile2.sh b/mod_ly/fit_dbeta_nfw_mass_profile2.sh
new file mode 100755
index 0000000..13a78dd
--- /dev/null
+++ b/mod_ly/fit_dbeta_nfw_mass_profile2.sh
@@ -0,0 +1,235 @@
+#!/bin/sh
+
+# modified by LIweitaNux, 2012/09/06
+export PATH="/usr/local/bin:/usr/bin:/bin:$PATH"
+
+if [ $# -eq 1 ]; then
+ cfg_file="$1"
+elif [ $# -eq 2 ]; then
+ cfg_file="$1"
+ CENTER_VAL="YES"
+else
+ echo "Usage: $0 <cfg_file> [c]"
+ exit 1
+fi
+
+if ! which xspec > /dev/null; then
+ printf "ERROR: please initialize HEASOFT first\n"
+ exit 2
+fi
+
+if [ -z "${HEADAS}" ]; then
+ printf "ERROR: variable \`HEADAS' not properly set\n"
+ exit 3
+fi
+
+export PGPLOT_FONT="${HEADAS}/lib/grfont.dat"
+printf "## PGPLOT_FONT: \`${PGPLOT_FONT}'\n"
+
+if [ "$0" = `basename $0` ]; then
+ script_path=`which $0`
+ base_path=`dirname ${script_path}`
+else
+ base_path=`dirname $0`
+fi
+printf "## base_path: \`${base_path}'\n"
+printf "## use configuration file: \`${cfg_file}'\n"
+
+#initialize profile type name
+t_profile_type=`grep '^t_profile' $cfg_file | awk '{print $2}'`
+printf "## t_profile_type: \`$t_profile_type'\n"
+#initialize data file name
+t_data_file=`grep '^t_data_file' $cfg_file | awk '{print $2}'`
+t_param_file=`grep '^t_param_file' $cfg_file | awk '{print $2}'`
+#initialize sbp config file
+sbp_cfg=`grep '^sbp_cfg' $cfg_file | awk '{print $2}'`
+#initialize the temperature profile file
+T_file=`grep '^T_file' $sbp_cfg | awk '{print $2}'`
+cfunc_file=`grep '^cfunc_file' ${sbp_cfg} |awk '{print $2}'`
+abund=`grep '^abund' ${cfg_file} |awk '{print $2}'`
+nh=`grep '^nh' ${cfg_file} |awk '{print $2}'`
+## calc `cm_per_pixel' instead {{{
+# cm_per_pixel=`grep '^cm_per_pixel' $sbp_cfg|awk '{print $2}'`
+z=`grep '^z' $sbp_cfg | awk '{ print $2 }'`
+cm_per_pixel=`${base_path}/calc_distance ${z} | grep 'cm_per_pixel' | awk '{ print $2 }'`
+sed -i'' "s/^cm_per_pixel.*$/cm_per_pixel ${cm_per_pixel}/" ${sbp_cfg}
+printf "## redshift: ${z}, cm_per_pixel: ${cm_per_pixel}\n"
+## cm_per_pixel }}}
+da=`python -c "print($cm_per_pixel/(.492/3600/180*3.1415926))"`
+dl=`python -c "print($da*(1+$z)**2)"`
+printf "da= ${da}\n"
+printf "dl= ${dl}\n"
+## sbp {{{
+sbp_data_file=`grep '^sbp_file' $sbp_cfg | awk '{print $2}'`
+radius_sbp_file=`grep '^radius_sbp_file' ${cfg_file} | awk '{print $2}'`
+
+if [ "x$radius_sbp_file" = "x" ]; then
+    echo "ERROR: radius_sbp_file must be assigned; it should be a 4-column file containing: radius, radius error, sbp, sbp error"
+ exit 200
+fi
+
+TMP_RSBP="_tmp_rsbp.txt"
+[ -e "${TMP_RSBP}" ] && rm -f ${TMP_RSBP}
+cat ${radius_sbp_file} | sed 's/#.*$//' | grep -Ev '^\s*$' > ${TMP_RSBP}
+# mv -f _tmp_rsbp.txt ${radius_sbp_file}
+radius_sbp_file="${TMP_RSBP}"
+## sbp }}}
+
+# determine which temperature profile to use, and fit the T profile {{{
+if [ "$t_profile_type" = "zyy" ]; then
+ $base_path/fit_zyy_model $t_data_file $t_param_file $cm_per_pixel
+ # mv -f zyy_dump.qdp ${T_file}
+ mv -f zyy_dump.qdp zyy_dump_center.qdp
+elif [ "$t_profile_type" = "m0603246" ]; then
+ $base_path/fit_m0603246 $t_data_file $cm_per_pixel
+ # mv -f m0603246_dump.qdp ${T_file}
+ mv -f m0603246_dump.qdp m0603246_dump_center.qdp
+elif [ "$t_profile_type" = "wang2012" ]; then
+ $base_path/fit_wang2012_model $t_data_file $t_param_file $cm_per_pixel 2> /dev/null | tee wang2012_center_param.txt
+ # mv -f wang2012_dump.qdp ${T_file}
+ cp -fv wang2012_dump.qdp ${T_file}
+ T_file_center="wang2012_dump_center.qdp"
+ mv -fv wang2012_dump.qdp ${T_file_center}
+ mv -fv fit_result.qdp wang2012_fit_center.qdp
+elif [ "$t_profile_type" = "allen" ]; then
+ $base_path/fit_allen_model $t_data_file $cm_per_pixel
+ # mv -f allen_dump.qdp ${T_file}
+ mv -f allen_dump.qdp allen_dump_center.qdp
+elif [ "$t_profile_type" = "zzl" ]; then
+ $base_path/fit_zzl_model $t_data_file $t_param_file
+ # mv -f zzl_dump.qdp ${T_file}
+ mv -f zzl_dump.qdp zzl_dump_center.qdp
+else
+ printf "ERROR: temperature profile name \`${t_profile_type}' invalid!\n"
+ exit 10
+fi
+# temp profile }}}
+
+$base_path/coolfunc_calc_bolo.sh ${T_file_center} $abund $nh $z cfunc_bolo.dat
+$base_path/coolfunc_calc.sh ${T_file_center} $abund $nh $z $cfunc_file
+mv -fv flux_cnt_ratio.txt flux_cnt_ratio_center.txt
+# fit sbp (double-beta)
+$base_path/fit_dbeta_sbp2 $sbp_cfg 2> /dev/null
+mv -fv dbeta_param.txt dbeta_param_center.txt
+cat dbeta_param_center.txt
+mv -fv sbp_fit.qdp sbp_fit_center.qdp
+mv -fv rho_fit.qdp rho_fit_center.qdp
+mv -fv rho_fit.dat rho_fit_center.dat
+$base_path/fit_nfw_mass mass_int.dat $z 2> /dev/null
+mv -fv nfw_param.txt nfw_param_center.txt
+mv -fv nfw_fit_result.qdp nfw_fit_center.qdp
+mv -fv nfw_dump.qdp mass_int_center.qdp
+mv -fv overdensity.qdp overdensity_center.qdp
+mv -fv gas_mass_int.qdp gas_mass_int_center.qdp
+
+#exit 233
+
+## cooling time
+$base_path/cooling_time rho_fit_center.dat ${T_file_center} cfunc_bolo.dat $dl $cm_per_pixel > cooling_time.dat
+## radius at which to evaluate tcool (0.048 * r500), not the cooling time itself
+rcool=`$base_path/analyze_mass_profile.py 500 c | grep '^r500' | awk -F "=" '{print .048*$2}'`
+printf "rcool= ${rcool}\n"
+
+## center value {{{
+if [ "${CENTER_VAL}" = "YES" ]; then
+ $base_path/analyze_mass_profile.py 200 c
+ $base_path/analyze_mass_profile.py 500 c
+ $base_path/extract_tcool.py $rcool | tee cooling_time_result.txt
+ exit 0
+fi
+## center value }}}
+
+# clean previous files
+rm -f summary_shuffle_mass_profile.qdp
+rm -f summary_overdensity.qdp
+rm -f summary_mass_profile.qdp
+rm -f summary_gas_mass_profile.qdp
+
+## count
+COUNT=1
+
+# 100 Monte Carlo simulations to estimate the errors
+# (repeat the steps above with resampled data)
+printf "\n+++++++++++++++++++ Monte Carlo +++++++++++++++++++++\n"
+for i in `seq 1 100`; do
+ # echo $t_data_file
+ $base_path/shuffle_T.py $t_data_file temp_shuffled_t.dat
+ $base_path/shuffle_sbp.py $sbp_data_file temp_shuffled_sbp.dat
+ # t_data_file=temp_shuffled_t.dat
+#exit
+
+ if [ "$t_profile_type" = "zyy" ]; then
+ $base_path/fit_zyy_model temp_shuffled_t.dat $t_param_file $cm_per_pixel
+ mv -f zyy_dump.qdp ${T_file}
+ elif [ "$t_profile_type" = "m0603246" ]; then
+ $base_path/fit_m0603246 temp_shuffled_t.dat $cm_per_pixel
+ mv -f m0603246_dump.qdp ${T_file}
+ elif [ "$t_profile_type" = "wang2012" ]; then
+ $base_path/fit_wang2012_model temp_shuffled_t.dat $t_param_file $cm_per_pixel 2> /dev/null
+ mv -f wang2012_dump.qdp ${T_file}
+ elif [ "$t_profile_type" = "allen" ]; then
+ $base_path/fit_allen_model temp_shuffled_t.dat $cm_per_pixel
+ mv -f allen_dump.qdp ${T_file}
+ elif [ "$t_profile_type" = "zzl" ]; then
+ $base_path/fit_zzl_model temp_shuffled_t.dat $t_param_file
+ mv -f zzl_dump.qdp ${T_file}
+ else
+ printf "ERROR: temperature profile name \`${t_profile_type}' invalid!\n"
+ exit 10
+ fi
+
+#exit
+ : > temp_sbp.cfg
+
+ cat $sbp_cfg | while read l; do
+ if echo $l | grep -q 'sbp_file'; then
+ echo "sbp_file temp_shuffled_sbp.dat" >> temp_sbp.cfg
+ elif echo $l | grep -q 'T_file'; then
+ echo "T_file ${T_file}" >> temp_sbp.cfg
+ else
+ echo $l >> temp_sbp.cfg
+ fi
+ done
+
+ ## count
+ printf "## ${COUNT} ##\n"
+ COUNT=`expr ${COUNT} + 1`
+ printf "## `pwd` ##\n"
+ $base_path/coolfunc_calc.sh ${T_file} ${abund} ${nh} ${z} ${cfunc_file}
+
+ # double-beta model
+ $base_path/fit_dbeta_sbp2 temp_sbp.cfg 2> /dev/null
+ [ -r "dbeta_param.txt" ] && cat dbeta_param.txt
+ $base_path/fit_nfw_mass mass_int.dat $z 2> /dev/null
+ cat nfw_dump.qdp >> summary_mass_profile.qdp
+ echo "no no no" >> summary_mass_profile.qdp
+
+ cat overdensity.qdp >> summary_overdensity.qdp
+ echo "no no no" >> summary_overdensity.qdp
+
+ cat gas_mass_int.qdp >> summary_gas_mass_profile.qdp
+ echo "no no no" >> summary_gas_mass_profile.qdp
+
+done  # end of the \`for' loop
+printf "\n+++++++++++++++++++ Monte Carlo +++++++++++++++++++++\n"
+
+# analyze the errors
+printf "\n+++++++++++++++ RESULTS (double-beta) +++++++++++++++\n"
+RESULT="results_dbeta_mrl.txt"
+[ -e "${RESULT}" ] && mv -fv ${RESULT} ${RESULT}_bak
+
+$base_path/analyze_mass_profile.py 200 | tee -a ${RESULT}
+$base_path/analyze_mass_profile.py 500 | tee -a ${RESULT}
+$base_path/analyze_mass_profile.py 1500 | tee -a ${RESULT}
+$base_path/analyze_mass_profile.py 2500 | tee -a ${RESULT}
+
+R200_VAL=`grep '^r200' ${RESULT} | awk '{ print $2 }'`
+R500_VAL=`grep '^r500' ${RESULT} | awk '{ print $2 }'`
+printf "\n## R200: ${R200_VAL}, R500: ${R500_VAL}\n"
+L200=`$base_path/calc_lx $radius_sbp_file flux_cnt_ratio_center.txt $z ${R200_VAL} $t_data_file | grep '^Lx' | awk '{ print $2,$3,$4 }'`
+L500=`$base_path/calc_lx $radius_sbp_file flux_cnt_ratio_center.txt $z ${R500_VAL} $t_data_file | grep '^Lx' | awk '{ print $2,$3,$4 }'`
+printf "L200= ${L200} erg/s\n" | tee -a ${RESULT}
+printf "L500= ${L500} erg/s\n" | tee -a ${RESULT}
+$base_path/extract_tcool.py $rcool | tee cooling_time_result.txt
+printf "\n+++++++++++++++++++++++++++++++++++++++++++++++++++++\n"
+
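A minimal sketch (Python with NumPy, not part of this commit) of the Monte Carlo error estimate that the two scripts above perform with 100 resampled profiles: perturb each data point within its quoted error, refit, and quote the spread of the fitted quantity. Here `fit_mass' is only a stand-in for the real fitting chain (`fit_dbeta_sbp' + `fit_nfw_mass'), and all data values are hypothetical.

    import numpy as np

    rng = np.random.default_rng()

    def fit_mass(radius, temperature):
        # Placeholder for the real dbeta+NFW fit; returns a single scalar.
        return float(np.sum(temperature) * radius.max())

    radius = np.linspace(10.0, 500.0, 20)            # kpc (hypothetical)
    temp = 5.0 + rng.normal(0.0, 0.3, radius.size)   # keV (hypothetical)
    temp_err = np.full_like(temp, 0.3)

    samples = [fit_mass(radius, rng.normal(temp, temp_err)) for _ in range(100)]
    lo, hi = np.percentile(samples, [16, 84])
    print("68%% interval: %.3g - %.3g" % (lo, hi))
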
diff --git a/mod_ly/fit_dbeta_nfw_mass_profile_simple.sh b/mod_ly/fit_dbeta_nfw_mass_profile_simple.sh
new file mode 100755
index 0000000..44999e3
--- /dev/null
+++ b/mod_ly/fit_dbeta_nfw_mass_profile_simple.sh
@@ -0,0 +1,135 @@
+#!/bin/sh
+
+# modified by LIweitaNux, 2012/09/06
+export PATH="/usr/local/bin:/usr/bin:/bin:$PATH"
+
+if [ $# -eq 1 ]; then
+ cfg_file="$1"
+elif [ $# -eq 2 ]; then
+ cfg_file="$1"
+ CENTER_VAL="YES"
+else
+ echo "Usage: $0 <cfg_file> [c]"
+ exit 1
+fi
+
+if ! which xspec > /dev/null; then
+ printf "ERROR: please initialize HEASOFT first\n"
+ exit 2
+fi
+
+if [ -z "${HEADAS}" ]; then
+ printf "ERROR: variable \`HEADAS' not properly set\n"
+ exit 3
+fi
+
+export PGPLOT_FONT="${HEADAS}/lib/grfont.dat"
+printf "## PGPLOT_FONT: \`${PGPLOT_FONT}'\n"
+
+if [ "$0" = `basename $0` ]; then
+ script_path=`which $0`
+ base_path=`dirname ${script_path}`
+else
+ base_path=`dirname $0`
+fi
+printf "## base_path: \`${base_path}'\n"
+printf "## use configuration file: \`${cfg_file}'\n"
+
+#initialize profile type name
+t_profile_type=`grep '^t_profile' $cfg_file | awk '{print $2}'`
+printf "## t_profile_type: \`$t_profile_type'\n"
+#initialize data file name
+t_data_file=`grep '^t_data_file' $cfg_file | awk '{print $2}'`
+t_param_file=`grep '^t_param_file' $cfg_file | awk '{print $2}'`
+#initialize sbp config file
+sbp_cfg=`grep '^sbp_cfg' $cfg_file | awk '{print $2}'`
+#initialize the temperature profile file
+T_file=`grep '^T_file' $sbp_cfg | awk '{print $2}'`
+cfunc_file=`grep '^cfunc_file' ${sbp_cfg} |awk '{print $2}'`
+abund=`grep '^abund' ${cfg_file} |awk '{print $2}'`
+nh=`grep '^nh' ${cfg_file} |awk '{print $2}'`
+## calc `cm_per_pixel' instead {{{
+# cm_per_pixel=`grep '^cm_per_pixel' $sbp_cfg|awk '{print $2}'`
+z=`grep '^z' $sbp_cfg | awk '{ print $2 }'`
+cm_per_pixel=`${base_path}/calc_distance ${z} | grep 'cm_per_pixel' | awk '{ print $2 }'`
+sed -i'' "s/^cm_per_pixel.*$/cm_per_pixel ${cm_per_pixel}/" ${sbp_cfg}
+printf "## redshift: ${z}, cm_per_pixel: ${cm_per_pixel}\n"
+## cm_per_pixel }}}
+da=`python -c "print($cm_per_pixel/(.492/3600/180*3.1415926))"`
+dl=`python -c "print($da*(1+$z)**2)"`
+## sbp {{{
+sbp_data_file=`grep '^sbp_file' $sbp_cfg | awk '{print $2}'`
+radius_sbp_file=`grep '^radius_sbp_file' ${cfg_file} | awk '{print $2}'`
+
+if [ "x$radius_sbp_file" = "x" ]; then
+    echo "ERROR: radius_sbp_file must be assigned; it should be a 4-column file containing: radius, radius error, sbp, sbp error"
+ exit 200
+fi
+
+TMP_RSBP="_tmp_rsbp.txt"
+[ -e "${TMP_RSBP}" ] && rm -f ${TMP_RSBP}
+cat ${radius_sbp_file} | sed 's/#.*$//' | grep -Ev '^\s*$' > ${TMP_RSBP}
+# mv -f _tmp_rsbp.txt ${radius_sbp_file}
+radius_sbp_file="${TMP_RSBP}"
+## sbp }}}
+
+# determine which temperature profile to use, and fit the T profile {{{
+if [ "$t_profile_type" = "zyy" ]; then
+ $base_path/fit_zyy_model $t_data_file $t_param_file $cm_per_pixel
+ # mv -f zyy_dump.qdp ${T_file}
+ mv -f zyy_dump.qdp zyy_dump_center.qdp
+elif [ "$t_profile_type" = "m0603246" ]; then
+ $base_path/fit_m0603246 $t_data_file $cm_per_pixel
+ # mv -f m0603246_dump.qdp ${T_file}
+ mv -f m0603246_dump.qdp m0603246_dump_center.qdp
+elif [ "$t_profile_type" = "wang2012" ]; then
+ $base_path/fit_wang2012_model $t_data_file $t_param_file $cm_per_pixel 2> /dev/null | tee wang2012_center_param.txt
+ # mv -f wang2012_dump.qdp ${T_file}
+ cp -fv wang2012_dump.qdp ${T_file}
+ T_file_center="wang2012_dump_center.qdp"
+ mv -fv wang2012_dump.qdp ${T_file_center}
+ mv -fv fit_result.qdp wang2012_fit_center.qdp
+elif [ "$t_profile_type" = "allen" ]; then
+ $base_path/fit_allen_model $t_data_file $cm_per_pixel
+ # mv -f allen_dump.qdp ${T_file}
+ mv -f allen_dump.qdp allen_dump_center.qdp
+elif [ "$t_profile_type" = "zzl" ]; then
+ $base_path/fit_zzl_model $t_data_file $t_param_file
+ # mv -f zzl_dump.qdp ${T_file}
+ mv -f zzl_dump.qdp zzl_dump_center.qdp
+else
+ printf "ERROR: temperature profile name \`${t_profile_type}' invalid!\n"
+ exit 10
+fi
+# temp profile }}}
+
+$base_path/coolfunc_calc_bolo.sh ${T_file_center} $abund $nh $z cfunc_bolo.dat
+$base_path/coolfunc_calc.sh ${T_file_center} $abund $nh $z $cfunc_file
+mv -fv flux_cnt_ratio.txt flux_cnt_ratio_center.txt
+# fit sbp (double-beta)
+$base_path/fit_dbeta_sbp $sbp_cfg 2> /dev/null
+mv -fv dbeta_param.txt dbeta_param_center.txt
+mv -fv sbp_fit.qdp sbp_fit_center.qdp
+mv -fv rho_fit.qdp rho_fit_center.qdp
+$base_path/fit_nfw_mass mass_int.dat $z 2> /dev/null
+mv -fv nfw_param.txt nfw_param_center.txt
+mv -fv nfw_fit_result.qdp nfw_fit_center.qdp
+mv -fv nfw_dump.qdp mass_int_center.qdp
+mv -fv overdensity.qdp overdensity_center.qdp
+mv -fv gas_mass_int.qdp gas_mass_int_center.qdp
+
+## cooling time
+$base_path/cooling_time rho_fit_center.dat ${T_file_center} cfunc_bolo.dat $dl $cm_per_pixel > cooling_time.dat
+## radius at which to evaluate tcool (0.048 * r500), not the cooling time itself
+rcool=`$base_path/analyze_mass_profile.py 500 c | grep '^r500' | awk -F "=" '{print .048*$2}'`
+
+## center value {{{
+if [ "${CENTER_VAL}" = "YES" ]; then
+ $base_path/analyze_mass_profile.py 200 c
+ $base_path/analyze_mass_profile.py 500 c
+ exit 0
+fi
+## center value }}}
+
+exit 0
+
diff --git a/mod_ly/luminosity_0.1-2.4.sh b/mod_ly/luminosity_0.1-2.4.sh
new file mode 100755
index 0000000..91c94c1
--- /dev/null
+++ b/mod_ly/luminosity_0.1-2.4.sh
@@ -0,0 +1,77 @@
+#!/bin/sh
+
+export PATH="/usr/local/bin:/usr/bin:/bin:$PATH"
+
+if [ $# -ne 1 ]; then
+ echo "Usage: $0 <cfg_file>"
+ exit 1
+fi
+
+if ! which xspec > /dev/null; then
+ printf "ERROR: please initialize HEASOFT first\n"
+ exit 2
+fi
+
+if [ -z "${HEADAS}" ]; then
+ printf "ERROR: variable \`HEADAS' not properly set\n"
+ exit 3
+fi
+
+export PGPLOT_FONT="${HEADAS}/lib/grfont.dat"
+printf "## PGPLOT_FONT: \`${PGPLOT_FONT}'\n"
+
+if [ "$0" = `basename $0` ]; then
+ script_path=`which $0`
+ base_path=`dirname ${script_path}`
+else
+ base_path=`dirname $0`
+fi
+printf "## base_path: \`${base_path}'\n"
+cfg_file="$1"
+printf "## use configuration file: \`${cfg_file}'\n"
+
+read -p "R200:" R200
+read -p "R500:" R500
+
+#initialize data file name
+t_data_file=`grep '^t_data_file' $cfg_file | awk '{print $2}'`
+#t_param_file=`grep '^t_param_file' $cfg_file | awk '{print $2}'`
+#initialize sbp config file
+sbp_cfg=`grep '^sbp_cfg' $cfg_file | awk '{print $2}'`
+#initialize the temperature profile file
+T_file=`grep '^T_file' $sbp_cfg | awk '{print $2}'`
+cfunc_file=`grep '^cfunc_file' ${sbp_cfg} |awk '{print $2}'`
+abund=`grep '^abund' ${cfg_file} |awk '{print $2}'`
+nh=`grep '^nh' ${cfg_file} |awk '{print $2}'`
+## calc `cm_per_pixel' instead {{{
+cm_per_pixel=`grep '^cm_per_pixel' $sbp_cfg|awk '{print $2}'`
+z=`grep '^z' $sbp_cfg | awk '{ print $2 }'`
+cm_per_pixel=`${base_path}/calc_distance ${z} | grep 'cm_per_pixel' | awk '{ print $2 }'`
+sed -i'' "s/^cm_per_pixel.*$/cm_per_pixel ${cm_per_pixel}/" ${sbp_cfg}
+printf "## redshift: ${z}, cm_per_pixel: ${cm_per_pixel}\n"
+## cm_per_pixel }}}
+## sbp {{{
+#sbp_data_file=`grep '^sbp_file' $sbp_cfg | awk '{print $2}'`
+radius_sbp_file=`grep '^radius_sbp_file' ${cfg_file} | awk '{print $2}'`
+if [ "x$radius_sbp_file" = "x" ]; then
+    echo "ERROR: radius_sbp_file must be assigned; it should be a 4-column file containing: radius, radius error, sbp, sbp error"
+ exit 200
+fi
+
+TMP_RSBP="_tmp_rsbp.txt"
+[ -e "${TMP_RSBP}" ] && rm -f ${TMP_RSBP}
+cat ${radius_sbp_file} | sed 's/#.*$//' | grep -Ev '^\s*$' > ${TMP_RSBP}
+radius_sbp_file="${TMP_RSBP}"
+## sbp }}}
+
+mv -fv flux_cnt_ratio.txt flux_bolo_cnt_ratio.txt
+mv -fv flux_cnt_ratio_center.txt flux_bolo_cnt_ratio_center.txt
+
+$base_path/coolfunc_0.1-2.4_calc.sh ${T_file} $abund $nh $z $cfunc_file
+mv -fv flux_cnt_ratio.txt flux_0.1-2.4_cnt_ratio_center.txt
+
+L200_flux0124=`$base_path/calc_lx $radius_sbp_file flux_0.1-2.4_cnt_ratio_center.txt $z ${R200} $t_data_file | grep '^Lx' | awk '{ print $2,$3,$4 }'`
+L500_flux0124=`$base_path/calc_lx $radius_sbp_file flux_0.1-2.4_cnt_ratio_center.txt $z ${R500} $t_data_file | grep '^Lx' | awk '{ print $2,$3,$4 }'`
+printf "L200_0.1-2.4= ${L200_flux0124} erg/s\n"
+printf "L500_0.1-2.4= ${L500_flux0124} erg/s\n"
+
diff --git a/mod_ly/luminosity_0.1-2.4_check.sh b/mod_ly/luminosity_0.1-2.4_check.sh
new file mode 100755
index 0000000..6614446
--- /dev/null
+++ b/mod_ly/luminosity_0.1-2.4_check.sh
@@ -0,0 +1,80 @@
+#!/bin/sh
+
+export PATH="/usr/local/bin:/usr/bin:/bin:$PATH"
+
+if [ $# -ne 3 ]; then
+ echo "Usage: $0 <cfg_file> <R200> <R500>"
+ exit 1
+fi
+
+if ! which xspec > /dev/null; then
+ printf "ERROR: please initialize HEASOFT first\n"
+ exit 2
+fi
+
+if [ -z "${HEADAS}" ]; then
+ printf "ERROR: variable \`HEADAS' not properly set\n"
+ exit 3
+fi
+
+export PGPLOT_FONT="${HEADAS}/lib/grfont.dat"
+#printf "## PGPLOT_FONT: \`${PGPLOT_FONT}'\n"
+
+if [ "$0" = `basename $0` ]; then
+ script_path=`which $0`
+ base_path=`dirname ${script_path}`
+else
+ base_path=`dirname $0`
+fi
+#printf "## base_path: \`${base_path}'\n"
+cfg_file="$1"
+#printf "## use configuration file: \`${cfg_file}'\n"
+R200=$2
+R500=$3
+
+# read -p "R200:" R200
+# read -p "R500:" R500
+
+#initialize data file name
+t_data_file=`grep '^t_data_file' $cfg_file | awk '{print $2}'`
+#t_param_file=`grep '^t_param_file' $cfg_file | awk '{print $2}'`
+#initialize sbp config file
+sbp_cfg=`grep '^sbp_cfg' $cfg_file | awk '{print $2}'`
+#initialize the temperature profile file
+T_file=`grep '^T_file' $sbp_cfg | awk '{print $2}'`
+cfunc_file=`grep '^cfunc_file' ${sbp_cfg} |awk '{print $2}'`
+# abund=`grep '^abund' ${cfg_file} |awk '{print $2}'`
+read -p "> abund: " abund
+nh=`grep '^nh' ${cfg_file} |awk '{print $2}'`
+## calc `cm_per_pixel' instead {{{
+cm_per_pixel=`grep '^cm_per_pixel' $sbp_cfg|awk '{print $2}'`
+z=`grep '^z' $sbp_cfg | awk '{ print $2 }'`
+cm_per_pixel=`${base_path}/calc_distance ${z} | grep 'cm_per_pixel' | awk '{ print $2 }'`
+sed -i'' "s/^cm_per_pixel.*$/cm_per_pixel ${cm_per_pixel}/" ${sbp_cfg}
+# printf "## redshift: ${z}, cm_per_pixel: ${cm_per_pixel}\n"
+## cm_per_pixel }}}
+## sbp {{{
+#sbp_data_file=`grep '^sbp_file' $sbp_cfg | awk '{print $2}'`
+radius_sbp_file=`grep '^radius_sbp_file' ${cfg_file} | awk '{print $2}'`
+if [ "x$radius_sbp_file" = "x" ]; then
+    echo "ERROR: radius_sbp_file must be assigned; it should be a 4-column file containing: radius, radius error, sbp, sbp error"
+ exit 200
+fi
+
+TMP_RSBP="_tmp_rsbp.txt"
+[ -e "${TMP_RSBP}" ] && rm -f ${TMP_RSBP}
+cat ${radius_sbp_file} | sed 's/#.*$//' | grep -Ev '^\s*$' > ${TMP_RSBP}
+radius_sbp_file="${TMP_RSBP}"
+## sbp }}}
+
+[ -e "flux_cnt_ratio.txt" ] && mv -fv flux_cnt_ratio.txt flux_bolo_cnt_ratio.txt
+[ -e "flux_cnt_ratio_center.txt" ] && mv -fv flux_cnt_ratio_center.txt flux_bolo_cnt_ratio_center.txt
+
+$base_path/coolfunc_0.1-2.4_calc.sh ${T_file} $abund $nh $z $cfunc_file
+mv -fv flux_cnt_ratio.txt flux_0.1-2.4_cnt_ratio_center.txt
+
+L200_flux0124=`$base_path/calc_lx $radius_sbp_file flux_0.1-2.4_cnt_ratio_center.txt $z ${R200} $t_data_file | grep '^Lx' | awk '{ print $2,$3,$4 }'`
+L500_flux0124=`$base_path/calc_lx $radius_sbp_file flux_0.1-2.4_cnt_ratio_center.txt $z ${R500} $t_data_file | grep '^Lx' | awk '{ print $2,$3,$4 }'`
+printf "L200_0.1-2.4= ${L200_flux0124} erg/s\n"
+printf "L500_0.1-2.4= ${L500_flux0124} erg/s\n"
+
diff --git a/mod_ly/luminosity_0.1-2.4_lwt.sh b/mod_ly/luminosity_0.1-2.4_lwt.sh
new file mode 100755
index 0000000..125bfc7
--- /dev/null
+++ b/mod_ly/luminosity_0.1-2.4_lwt.sh
@@ -0,0 +1,79 @@
+#!/bin/sh
+
+export PATH="/usr/local/bin:/usr/bin:/bin:$PATH"
+
+if [ $# -ne 3 ]; then
+ echo "Usage: $0 <cfg_file> <R200> <R500>"
+ exit 1
+fi
+
+if ! which xspec > /dev/null; then
+ printf "ERROR: please initialize HEASOFT first\n"
+ exit 2
+fi
+
+if [ -z "${HEADAS}" ]; then
+ printf "ERROR: variable \`HEADAS' not properly set\n"
+ exit 3
+fi
+
+export PGPLOT_FONT="${HEADAS}/lib/grfont.dat"
+#printf "## PGPLOT_FONT: \`${PGPLOT_FONT}'\n"
+
+if [ "$0" = `basename $0` ]; then
+ script_path=`which $0`
+ base_path=`dirname ${script_path}`
+else
+ base_path=`dirname $0`
+fi
+#printf "## base_path: \`${base_path}'\n"
+cfg_file="$1"
+#printf "## use configuration file: \`${cfg_file}'\n"
+R200=$2
+R500=$3
+
+# read -p "R200:" R200
+# read -p "R500:" R500
+
+#initialize data file name
+t_data_file=`grep '^t_data_file' $cfg_file | awk '{print $2}'`
+#t_param_file=`grep '^t_param_file' $cfg_file | awk '{print $2}'`
+#initialize sbp config file
+sbp_cfg=`grep '^sbp_cfg' $cfg_file | awk '{print $2}'`
+#initialize the temperature profile file
+T_file=`grep '^T_file' $sbp_cfg | awk '{print $2}'`
+cfunc_file=`grep '^cfunc_file' ${sbp_cfg} |awk '{print $2}'`
+abund=`grep '^abund' ${cfg_file} |awk '{print $2}'`
+nh=`grep '^nh' ${cfg_file} |awk '{print $2}'`
+## calc `cm_per_pixel' instead {{{
+cm_per_pixel=`grep '^cm_per_pixel' $sbp_cfg|awk '{print $2}'`
+z=`grep '^z' $sbp_cfg | awk '{ print $2 }'`
+cm_per_pixel=`${base_path}/calc_distance ${z} | grep 'cm_per_pixel' | awk '{ print $2 }'`
+sed -i'' "s/^cm_per_pixel.*$/cm_per_pixel ${cm_per_pixel}/" ${sbp_cfg}
+# printf "## redshift: ${z}, cm_per_pixel: ${cm_per_pixel}\n"
+## cm_per_pixel }}}
+## sbp {{{
+#sbp_data_file=`grep '^sbp_file' $sbp_cfg | awk '{print $2}'`
+radius_sbp_file=`grep '^radius_sbp_file' ${cfg_file} | awk '{print $2}'`
+if [ "x$radius_sbp_file" = "x" ]; then
+    echo "ERROR: radius_sbp_file must be assigned; it should be a 4-column file containing: radius, radius error, sbp, sbp error"
+ exit 200
+fi
+
+TMP_RSBP="_tmp_rsbp.txt"
+[ -e "${TMP_RSBP}" ] && rm -f ${TMP_RSBP}
+cat ${radius_sbp_file} | sed 's/#.*$//' | grep -Ev '^\s*$' > ${TMP_RSBP}
+radius_sbp_file="${TMP_RSBP}"
+## sbp }}}
+
+[ -e "flux_cnt_ratio.txt" ] && mv -fv flux_cnt_ratio.txt flux_bolo_cnt_ratio.txt
+[ -e "flux_cnt_ratio_center.txt" ] && mv -fv flux_cnt_ratio_center.txt flux_bolo_cnt_ratio_center.txt
+
+$base_path/coolfunc_0.1-2.4_calc.sh ${T_file} $abund $nh $z $cfunc_file
+mv -fv flux_cnt_ratio.txt flux_0.1-2.4_cnt_ratio_center.txt
+
+L200_flux0124=`$base_path/calc_lx $radius_sbp_file flux_0.1-2.4_cnt_ratio_center.txt $z ${R200} $t_data_file | grep '^Lx' | awk '{ print $2,$3,$4 }'`
+L500_flux0124=`$base_path/calc_lx $radius_sbp_file flux_0.1-2.4_cnt_ratio_center.txt $z ${R500} $t_data_file | grep '^Lx' | awk '{ print $2,$3,$4 }'`
+printf "L200_0.1-2.4= ${L200_flux0124} erg/s\n"
+printf "L500_0.1-2.4= ${L500_flux0124} erg/s\n"
+
diff --git a/mod_ly/luminosity_0.1-2.4_lwt_rs.sh b/mod_ly/luminosity_0.1-2.4_lwt_rs.sh
new file mode 100755
index 0000000..2e4d7cf
--- /dev/null
+++ b/mod_ly/luminosity_0.1-2.4_lwt_rs.sh
@@ -0,0 +1,79 @@
+#!/bin/sh
+
+export PATH="/usr/local/bin:/usr/bin:/bin:$PATH"
+
+if [ $# -ne 3 ]; then
+ echo "Usage: $0 <cfg_file> <R200> <R500>"
+ exit 1
+fi
+
+if ! which xspec > /dev/null; then
+ printf "ERROR: please initialize HEASOFT first\n"
+ exit 2
+fi
+
+if [ -z "${HEADAS}" ]; then
+ printf "ERROR: variable \`HEADAS' not properly set\n"
+ exit 3
+fi
+
+export PGPLOT_FONT="${HEADAS}/lib/grfont.dat"
+#printf "## PGPLOT_FONT: \`${PGPLOT_FONT}'\n"
+
+if [ "$0" = `basename $0` ]; then
+ script_path=`which $0`
+ base_path=`dirname ${script_path}`
+else
+ base_path=`dirname $0`
+fi
+#printf "## base_path: \`${base_path}'\n"
+cfg_file="$1"
+#printf "## use configuration file: \`${cfg_file}'\n"
+R200=$2
+R500=$3
+
+# read -p "R200:" R200
+# read -p "R500:" R500
+
+#initialize data file name
+t_data_file=`grep '^t_data_file' $cfg_file | awk '{print $2}'`
+#t_param_file=`grep '^t_param_file' $cfg_file | awk '{print $2}'`
+#initialize sbp config file
+sbp_cfg=`grep '^sbp_cfg' $cfg_file | awk '{print $2}'`
+#initialize the temperature profile file
+T_file=`grep '^T_file' $sbp_cfg | awk '{print $2}'`
+cfunc_file=`grep '^cfunc_file' ${sbp_cfg} |awk '{print $2}'`
+abund=`grep '^abund' ${cfg_file} |awk '{print $2}'`
+nh=`grep '^nh' ${cfg_file} |awk '{print $2}'`
+## calc `cm_per_pixel' instead {{{
+cm_per_pixel=`grep '^cm_per_pixel' $sbp_cfg|awk '{print $2}'`
+z=`grep '^z' $sbp_cfg | awk '{ print $2 }'`
+cm_per_pixel=`${base_path}/calc_distance ${z} | grep 'cm_per_pixel' | awk '{ print $2 }'`
+sed -i'' "s/^cm_per_pixel.*$/cm_per_pixel ${cm_per_pixel}/" ${sbp_cfg}
+# printf "## redshift: ${z}, cm_per_pixel: ${cm_per_pixel}\n"
+## cm_per_pixel }}}
+## sbp {{{
+#sbp_data_file=`grep '^sbp_file' $sbp_cfg | awk '{print $2}'`
+radius_sbp_file=`grep '^radius_sbp_file' ${cfg_file} | awk '{print $2}'`
+if [ "x$radius_sbp_file" = "x" ]; then
+    echo "ERROR: radius_sbp_file must be assigned; it should be a 4-column file containing: radius, radius error, sbp, sbp error"
+ exit 200
+fi
+
+TMP_RSBP="_tmp_rsbp.txt"
+[ -e "${TMP_RSBP}" ] && rm -f ${TMP_RSBP}
+cat ${radius_sbp_file} | sed 's/#.*$//' | grep -Ev '^\s*$' > ${TMP_RSBP}
+radius_sbp_file="${TMP_RSBP}"
+## sbp }}}
+
+[ -e "flux_cnt_ratio.txt" ] && mv -fv flux_cnt_ratio.txt flux_bolo_cnt_ratio.txt
+[ -e "flux_cnt_ratio_center.txt" ] && mv -fv flux_cnt_ratio_center.txt flux_bolo_cnt_ratio_center.txt
+
+$base_path/coolfunc_0.1-2.4_calc_rs.sh ${T_file} $abund $nh $z $cfunc_file
+mv -fv flux_cnt_ratio.txt flux_0.1-2.4_cnt_ratio_center.txt
+
+L200_flux0124=`$base_path/calc_lx $radius_sbp_file flux_0.1-2.4_cnt_ratio_center.txt $z ${R200} $t_data_file | grep '^Lx' | awk '{ print $2,$3,$4 }'`
+L500_flux0124=`$base_path/calc_lx $radius_sbp_file flux_0.1-2.4_cnt_ratio_center.txt $z ${R500} $t_data_file | grep '^Lx' | awk '{ print $2,$3,$4 }'`
+printf "L200_0.1-2.4= ${L200_flux0124} erg/s\n"
+printf "L500_0.1-2.4= ${L500_flux0124} erg/s\n"
+
diff --git a/mod_ly/luminosity_0.1-2.4_orig.sh b/mod_ly/luminosity_0.1-2.4_orig.sh
new file mode 100755
index 0000000..531a2b1
--- /dev/null
+++ b/mod_ly/luminosity_0.1-2.4_orig.sh
@@ -0,0 +1,72 @@
+#!/bin/sh
+
+export PATH="/usr/local/bin:/usr/bin:/bin:$PATH"
+
+if [ $# -ne 1 ]; then
+ echo "Usage: $0 <cfg_file>"
+ exit 1
+fi
+
+if ! which xspec > /dev/null; then
+ printf "ERROR: please initialize HEASOFT first\n"
+ exit 2
+fi
+
+if [ -z "${HEADAS}" ]; then
+ printf "ERROR: variable \`HEADAS' not properly set\n"
+ exit 3
+fi
+
+export PGPLOT_FONT="${HEADAS}/lib/grfont.dat"
+printf "## PGPLOT_FONT: \`${PGPLOT_FONT}'\n"
+
+if [ "$0" = `basename $0` ]; then
+ script_path=`which $0`
+ base_path=`dirname ${script_path}`
+else
+ base_path=`dirname $0`
+fi
+printf "## base_path: \`${base_path}'\n"
+cfg_file="$1"
+printf "## use configuration file: \`${cfg_file}'\n"
+
+read -p "R200:" R200
+read -p "R500:" R500
+
+#initialize data file name
+t_data_file=`grep '^t_data_file' $cfg_file | awk '{print $2}'`
+#t_param_file=`grep '^t_param_file' $cfg_file | awk '{print $2}'`
+#initialize sbp config file
+sbp_cfg=`grep '^sbp_cfg' $cfg_file | awk '{print $2}'`
+#initialize the temperature profile file
+T_file=`grep '^T_file' $sbp_cfg | awk '{print $2}'`
+cfunc_file=`grep '^cfunc_file' ${sbp_cfg} |awk '{print $2}'`
+abund=`grep '^abund' ${cfg_file} |awk '{print $2}'`
+nh=`grep '^nh' ${cfg_file} |awk '{print $2}'`
+## calc `cm_per_pixel' instead {{{
+cm_per_pixel=`grep '^cm_per_pixel' $sbp_cfg|awk '{print $2}'`
+z=`grep '^z' $sbp_cfg | awk '{ print $2 }'`
+cm_per_pixel=`${base_path}/calc_distance ${z} | grep 'cm_per_pixel' | awk '{ print $2 }'`
+sed -i'' "s/^cm_per_pixel.*$/cm_per_pixel ${cm_per_pixel}/" ${sbp_cfg}
+printf "## redshift: ${z}, cm_per_pixel: ${cm_per_pixel}\n"
+## cm_per_pixel }}}
+## sbp {{{
+#sbp_data_file=`grep '^sbp_file' $sbp_cfg | awk '{print $2}'`
+radius_sbp_file=`grep '^radius_sbp_file' ${cfg_file} | awk '{print $2}'`
+
+mv -fv flux_cnt_ratio.txt flux_bolo_cnt_ratio.txt
+mv -fv flux_cnt_ratio_center.txt flux_bolo_cnt_ratio_center.txt
+
+$base_path/coolfunc_0.1-2.4_calc.sh ${T_file} $abund $nh $z $cfunc_file
+mv -fv flux_cnt_ratio.txt flux_0.1-2.4_cnt_ratio_center.txt
+
+$base_path/coolfunc_0.1-2.4_calc.sh ${T_file} ${abund} ${nh} ${z} ${cfunc_file}
+mv -fv flux_cnt_ratio.txt flux_0.1-2.4_cnt_ratio.txt
+
+RESULT="$HOME/data_tot/luminosity.txt"
+
+L200_flux0124=`$base_path/calc_lx $radius_sbp_file flux_0.1-2.4_cnt_ratio_center.txt $z ${R200} $t_data_file | grep '^Lx' | awk '{ print $2,$3,$4 }'`
+L500_flux0124=`$base_path/calc_lx $radius_sbp_file flux_0.1-2.4_cnt_ratio_center.txt $z ${R500} $t_data_file | grep '^Lx' | awk '{ print $2,$3,$4 }'`
+printf "`pwd -P` : \n" | tee -a ${RESULT}
+printf "L200_0.1-2.4= ${L200_flux0124} erg/s\n" | tee -a ${RESULT}
+printf "L500_0.1-2.4= ${L500_flux0124} erg/s\n" | tee -a ${RESULT}
diff --git a/mod_ly/luminosity_0.5-2.sh b/mod_ly/luminosity_0.5-2.sh
new file mode 100755
index 0000000..f3ed643
--- /dev/null
+++ b/mod_ly/luminosity_0.5-2.sh
@@ -0,0 +1,77 @@
+#!/bin/sh
+
+export PATH="/usr/local/bin:/usr/bin:/bin:$PATH"
+
+if [ $# -ne 1 ]; then
+ echo "Usage: $0 <cfg_file>"
+ exit 1
+fi
+
+if ! which xspec > /dev/null; then
+ printf "ERROR: please initialize HEASOFT first\n"
+ exit 2
+fi
+
+if [ -z "${HEADAS}" ]; then
+ printf "ERROR: variable \`HEADAS' not properly set\n"
+ exit 3
+fi
+
+export PGPLOT_FONT="${HEADAS}/lib/grfont.dat"
+printf "## PGPLOT_FONT: \`${PGPLOT_FONT}'\n"
+
+if [ "$0" = `basename $0` ]; then
+ script_path=`which $0`
+ base_path=`dirname ${script_path}`
+else
+ base_path=`dirname $0`
+fi
+printf "## base_path: \`${base_path}'\n"
+cfg_file="$1"
+printf "## use configuration file: \`${cfg_file}'\n"
+
+read -p "R200:" R200
+read -p "R500:" R500
+
+#initialize data file name
+t_data_file=`grep '^t_data_file' $cfg_file | awk '{print $2}'`
+#t_param_file=`grep '^t_param_file' $cfg_file | awk '{print $2}'`
+#initialize sbp config file
+sbp_cfg=`grep '^sbp_cfg' $cfg_file | awk '{print $2}'`
+#initialize the temperature profile file
+T_file=`grep '^T_file' $sbp_cfg | awk '{print $2}'`
+cfunc_file=`grep '^cfunc_file' ${sbp_cfg} |awk '{print $2}'`
+abund=`grep '^abund' ${cfg_file} |awk '{print $2}'`
+nh=`grep '^nh' ${cfg_file} |awk '{print $2}'`
+## calc `cm_per_pixel' instead {{{
+cm_per_pixel=`grep '^cm_per_pixel' $sbp_cfg|awk '{print $2}'`
+z=`grep '^z' $sbp_cfg | awk '{ print $2 }'`
+cm_per_pixel=`${base_path}/calc_distance ${z} | grep 'cm_per_pixel' | awk '{ print $2 }'`
+sed -i'' "s/^cm_per_pixel.*$/cm_per_pixel ${cm_per_pixel}/" ${sbp_cfg}
+printf "## redshift: ${z}, cm_per_pixel: ${cm_per_pixel}\n"
+## cm_per_pixel }}}
+## sbp {{{
+#sbp_data_file=`grep '^sbp_file' $sbp_cfg | awk '{print $2}'`
+radius_sbp_file=`grep '^radius_sbp_file' ${cfg_file} | awk '{print $2}'`
+if [ "x$radius_sbp_file" = "x" ]; then
+    echo "ERROR: radius_sbp_file must be assigned; it should be a 4-column file containing: radius, radius error, sbp, sbp error"
+ exit 200
+fi
+
+TMP_RSBP="_tmp_rsbp.txt"
+[ -e "${TMP_RSBP}" ] && rm -f ${TMP_RSBP}
+cat ${radius_sbp_file} | sed 's/#.*$//' | grep -Ev '^\s*$' > ${TMP_RSBP}
+radius_sbp_file="${TMP_RSBP}"
+## sbp }}}
+
+mv -fv flux_cnt_ratio.txt flux_bolo_cnt_ratio.txt
+mv -fv flux_cnt_ratio_center.txt flux_bolo_cnt_ratio_center.txt
+
+$base_path/coolfunc_0.5-2_calc.sh ${T_file} $abund $nh $z $cfunc_file
+mv -fv flux_cnt_ratio.txt flux_0.5-2_cnt_ratio_center.txt
+
+L200_flux0520=`$base_path/calc_lx $radius_sbp_file flux_0.5-2_cnt_ratio_center.txt $z ${R200} $t_data_file | grep '^Lx' | awk '{ print $2,$3,$4 }'`
+L500_flux0520=`$base_path/calc_lx $radius_sbp_file flux_0.5-2_cnt_ratio_center.txt $z ${R500} $t_data_file | grep '^Lx' | awk '{ print $2,$3,$4 }'`
+printf "L200_0.5-2= ${L200_flux0520} erg/s\n"
+printf "L500_0.5-2= ${L500_flux0520} erg/s\n"
+
diff --git a/mod_ly/luminosity_0.5-2_lwt.sh b/mod_ly/luminosity_0.5-2_lwt.sh
new file mode 100755
index 0000000..8fe5f44
--- /dev/null
+++ b/mod_ly/luminosity_0.5-2_lwt.sh
@@ -0,0 +1,79 @@
+#!/bin/sh
+
+export PATH="/usr/local/bin:/usr/bin:/bin:$PATH"
+
+if [ $# -ne 3 ]; then
+ echo "Usage: $0 <cfg_file> <R200> <R500>"
+ exit 1
+fi
+
+if ! which xspec > /dev/null; then
+ printf "ERROR: please initialize HEASOFT first\n"
+ exit 2
+fi
+
+if [ -z "${HEADAS}" ]; then
+ printf "ERROR: variable \`HEADAS' not properly set\n"
+ exit 3
+fi
+
+export PGPLOT_FONT="${HEADAS}/lib/grfont.dat"
+#printf "## PGPLOT_FONT: \`${PGPLOT_FONT}'\n"
+
+if [ "$0" = `basename $0` ]; then
+ script_path=`which $0`
+ base_path=`dirname ${script_path}`
+else
+ base_path=`dirname $0`
+fi
+#printf "## base_path: \`${base_path}'\n"
+cfg_file="$1"
+#printf "## use configuration file: \`${cfg_file}'\n"
+R200=$2
+R500=$3
+
+#read -p "R200:" R200
+#read -p "R500:" R500
+
+#initialize data file name
+t_data_file=`grep '^t_data_file' $cfg_file | awk '{print $2}'`
+#t_param_file=`grep '^t_param_file' $cfg_file | awk '{print $2}'`
+#initialize sbp config file
+sbp_cfg=`grep '^sbp_cfg' $cfg_file | awk '{print $2}'`
+#initialize the temperature profile file
+T_file=`grep '^T_file' $sbp_cfg | awk '{print $2}'`
+cfunc_file=`grep '^cfunc_file' ${sbp_cfg} |awk '{print $2}'`
+abund=`grep '^abund' ${cfg_file} |awk '{print $2}'`
+nh=`grep '^nh' ${cfg_file} |awk '{print $2}'`
+## calc `cm_per_pixel' instead {{{
+cm_per_pixel=`grep '^cm_per_pixel' $sbp_cfg|awk '{print $2}'`
+z=`grep '^z' $sbp_cfg | awk '{ print $2 }'`
+cm_per_pixel=`${base_path}/calc_distance ${z} | grep 'cm_per_pixel' | awk '{ print $2 }'`
+sed -i'' "s/^cm_per_pixel.*$/cm_per_pixel ${cm_per_pixel}/" ${sbp_cfg}
+#printf "## redshift: ${z}, cm_per_pixel: ${cm_per_pixel}\n"
+## cm_per_pixel }}}
+## sbp {{{
+#sbp_data_file=`grep '^sbp_file' $sbp_cfg | awk '{print $2}'`
+radius_sbp_file=`grep '^radius_sbp_file' ${cfg_file} | awk '{print $2}'`
+if [ "x$radius_sbp_file" = "x" ]; then
+    echo "ERROR: radius_sbp_file must be assigned; it should be a 4-column file containing: radius, radius error, sbp, sbp error"
+ exit 200
+fi
+
+TMP_RSBP="_tmp_rsbp.txt"
+[ -e "${TMP_RSBP}" ] && rm -f ${TMP_RSBP}
+cat ${radius_sbp_file} | sed 's/#.*$//' | grep -Ev '^\s*$' > ${TMP_RSBP}
+radius_sbp_file="${TMP_RSBP}"
+## sbp }}}
+
+[ -e "flux_cnt_ratio.txt" ] && mv -fv flux_cnt_ratio.txt flux_bolo_cnt_ratio.txt
+[ -e "flux_cnt_ratio_center.txt" ] && mv -fv flux_cnt_ratio_center.txt flux_bolo_cnt_ratio_center.txt
+
+$base_path/coolfunc_0.5-2_calc.sh ${T_file} $abund $nh $z $cfunc_file
+mv -fv flux_cnt_ratio.txt flux_0.5-2_cnt_ratio_center.txt
+
+L200_flux0520=`$base_path/calc_lx $radius_sbp_file flux_0.5-2_cnt_ratio_center.txt $z ${R200} $t_data_file | grep '^Lx' | awk '{ print $2,$3,$4 }'`
+L500_flux0520=`$base_path/calc_lx $radius_sbp_file flux_0.5-2_cnt_ratio_center.txt $z ${R500} $t_data_file | grep '^Lx' | awk '{ print $2,$3,$4 }'`
+printf "L200_0.5-2= ${L200_flux0520} erg/s\n"
+printf "L500_0.5-2= ${L500_flux0520} erg/s\n"
+
diff --git a/mod_ly/luminosity_0.5-2_lwt_rs.sh b/mod_ly/luminosity_0.5-2_lwt_rs.sh
new file mode 100755
index 0000000..5019013
--- /dev/null
+++ b/mod_ly/luminosity_0.5-2_lwt_rs.sh
@@ -0,0 +1,79 @@
+#!/bin/sh
+
+export PATH="/usr/local/bin:/usr/bin:/bin:$PATH"
+
+if [ $# -ne 3 ]; then
+ echo "Usage: $0 <cfg_file> <R200> <R500>"
+ exit 1
+fi
+
+if ! which xspec > /dev/null; then
+ printf "ERROR: please initialize HEASOFT first\n"
+ exit 2
+fi
+
+if [ -z "${HEADAS}" ]; then
+ printf "ERROR: variable \`HEADAS' not properly set\n"
+ exit 3
+fi
+
+export PGPLOT_FONT="${HEADAS}/lib/grfont.dat"
+#printf "## PGPLOT_FONT: \`${PGPLOT_FONT}'\n"
+
+if [ "$0" = `basename $0` ]; then
+ script_path=`which $0`
+ base_path=`dirname ${script_path}`
+else
+ base_path=`dirname $0`
+fi
+#printf "## base_path: \`${base_path}'\n"
+cfg_file="$1"
+#printf "## use configuration file: \`${cfg_file}'\n"
+R200=$2
+R500=$3
+
+#read -p "R200:" R200
+#read -p "R500:" R500
+
+#initialize data file name
+t_data_file=`grep '^t_data_file' $cfg_file | awk '{print $2}'`
+#t_param_file=`grep '^t_param_file' $cfg_file | awk '{print $2}'`
+#initialize sbp config file
+sbp_cfg=`grep '^sbp_cfg' $cfg_file | awk '{print $2}'`
+#initialize the temperature profile file
+T_file=`grep '^T_file' $sbp_cfg | awk '{print $2}'`
+cfunc_file=`grep '^cfunc_file' ${sbp_cfg} |awk '{print $2}'`
+abund=`grep '^abund' ${cfg_file} |awk '{print $2}'`
+nh=`grep '^nh' ${cfg_file} |awk '{print $2}'`
+## calc `cm_per_pixel' instead {{{
+cm_per_pixel=`grep '^cm_per_pixel' $sbp_cfg|awk '{print $2}'`
+z=`grep '^z' $sbp_cfg | awk '{ print $2 }'`
+cm_per_pixel=`${base_path}/calc_distance ${z} | grep 'cm_per_pixel' | awk '{ print $2 }'`
+sed -i'' "s/^cm_per_pixel.*$/cm_per_pixel ${cm_per_pixel}/" ${sbp_cfg}
+#printf "## redshift: ${z}, cm_per_pixel: ${cm_per_pixel}\n"
+## cm_per_pixel }}}
+## sbp {{{
+#sbp_data_file=`grep '^sbp_file' $sbp_cfg | awk '{print $2}'`
+radius_sbp_file=`grep '^radius_sbp_file' ${cfg_file} | awk '{print $2}'`
+if [ "x$radius_sbp_file" = "x" ]; then
+    echo "ERROR: radius_sbp_file must be assigned; it should be a 4-column file containing: radius, radius error, sbp, sbp error"
+ exit 200
+fi
+
+TMP_RSBP="_tmp_rsbp.txt"
+[ -e "${TMP_RSBP}" ] && rm -f ${TMP_RSBP}
+cat ${radius_sbp_file} | sed 's/#.*$//' | grep -Ev '^\s*$' > ${TMP_RSBP}
+radius_sbp_file="${TMP_RSBP}"
+## sbp }}}
+
+[ -e "flux_cnt_ratio.txt" ] && mv -fv flux_cnt_ratio.txt flux_bolo_cnt_ratio.txt
+[ -e "flux_cnt_ratio_center.txt" ] && mv -fv flux_cnt_ratio_center.txt flux_bolo_cnt_ratio_center.txt
+
+$base_path/coolfunc_0.5-2_calc_rs.sh ${T_file} $abund $nh $z $cfunc_file
+mv -fv flux_cnt_ratio.txt flux_0.5-2_cnt_ratio_center.txt
+
+L200_flux0520=`$base_path/calc_lx $radius_sbp_file flux_0.5-2_cnt_ratio_center.txt $z ${R200} $t_data_file | grep '^Lx' | awk '{ print $2,$3,$4 }'`
+L500_flux0520=`$base_path/calc_lx $radius_sbp_file flux_0.5-2_cnt_ratio_center.txt $z ${R500} $t_data_file | grep '^Lx' | awk '{ print $2,$3,$4 }'`
+printf "L200_0.5-2= ${L200_flux0520} erg/s\n"
+printf "L500_0.5-2= ${L500_flux0520} erg/s\n"
+
diff --git a/mod_ly/luminosity_0.5-2_orig.sh b/mod_ly/luminosity_0.5-2_orig.sh
new file mode 100755
index 0000000..228d2c0
--- /dev/null
+++ b/mod_ly/luminosity_0.5-2_orig.sh
@@ -0,0 +1,72 @@
+#!/bin/sh
+
+export PATH="/usr/local/bin:/usr/bin:/bin:$PATH"
+
+if [ $# -ne 1 ]; then
+ echo "Usage: $0 <cfg_file>"
+ exit 1
+fi
+
+if ! which xspec > /dev/null; then
+ printf "ERROR: please initialize HEASOFT first\n"
+ exit 2
+fi
+
+if [ -z "${HEADAS}" ]; then
+ printf "ERROR: variable \`HEADAS' not properly set\n"
+ exit 3
+fi
+
+export PGPLOT_FONT="${HEADAS}/lib/grfont.dat"
+printf "## PGPLOT_FONT: \`${PGPLOT_FONT}'\n"
+
+if [ "$0" = `basename $0` ]; then
+ script_path=`which $0`
+ base_path=`dirname ${script_path}`
+else
+ base_path=`dirname $0`
+fi
+printf "## base_path: \`${base_path}'\n"
+cfg_file="$1"
+printf "## use configuration file: \`${cfg_file}'\n"
+
+read -p "R200:" R200
+read -p "R500:" R500
+
+#initialize data file name
+t_data_file=`grep '^t_data_file' $cfg_file | awk '{print $2}'`
+#t_param_file=`grep '^t_param_file' $cfg_file | awk '{print $2}'`
+#initialize sbp config file
+sbp_cfg=`grep '^sbp_cfg' $cfg_file | awk '{print $2}'`
+#initialize the temperature profile file
+T_file=`grep '^T_file' $sbp_cfg | awk '{print $2}'`
+cfunc_file=`grep '^cfunc_file' ${sbp_cfg} |awk '{print $2}'`
+abund=`grep '^abund' ${cfg_file} |awk '{print $2}'`
+nh=`grep '^nh' ${cfg_file} |awk '{print $2}'`
+## calc `cm_per_pixel' instead {{{
+cm_per_pixel=`grep '^cm_per_pixel' $sbp_cfg|awk '{print $2}'`
+z=`grep '^z' $sbp_cfg | awk '{ print $2 }'`
+cm_per_pixel=`${base_path}/calc_distance ${z} | grep 'cm_per_pixel' | awk '{ print $2 }'`
+sed -i'' "s/^cm_per_pixel.*$/cm_per_pixel ${cm_per_pixel}/" ${sbp_cfg}
+printf "## redshift: ${z}, cm_per_pixel: ${cm_per_pixel}\n"
+## cm_per_pixel }}}
+## sbp {{{
+#sbp_data_file=`grep '^sbp_file' $sbp_cfg | awk '{print $2}'`
+radius_sbp_file=`grep '^radius_sbp_file' ${cfg_file} | awk '{print $2}'`
+
+mv -fv flux_cnt_ratio.txt flux_bolo_cnt_ratio.txt
+mv -fv flux_cnt_ratio_center.txt flux_bolo_cnt_ratio_center.txt
+
+$base_path/coolfunc_0.5-2_calc.sh ${T_file} $abund $nh $z $cfunc_file
+mv -fv flux_cnt_ratio.txt flux_0.5-2_cnt_ratio_center.txt
+
+$base_path/coolfunc_0.5-2_calc.sh ${T_file} ${abund} ${nh} ${z} ${cfunc_file}
+mv -fv flux_cnt_ratio.txt flux_0.5-2_cnt_ratio.txt
+
+RESULT="$HOME/data_tot/luminosity.txt"
+
+L200_flux0520=`$base_path/calc_lx $radius_sbp_file flux_0.5-2_cnt_ratio_center.txt $z ${R200} $t_data_file | grep '^Lx' | awk '{ print $2,$3,$4 }'`
+L500_flux0520=`$base_path/calc_lx $radius_sbp_file flux_0.5-2_cnt_ratio_center.txt $z ${R500} $t_data_file | grep '^Lx' | awk '{ print $2,$3,$4 }'`
+printf "`pwd -P` : \n" | tee -a ${RESULT}
+printf "L200_0.5-2= ${L200_flux0520} erg/s\n" | tee -a ${RESULT}
+printf "L500_0.5-2= ${L500_flux0520} erg/s\n" | tee -a ${RESULT}
diff --git a/mod_ly/lx_simple_gxy.sh b/mod_ly/lx_simple_gxy.sh
new file mode 100755
index 0000000..7ce581a
--- /dev/null
+++ b/mod_ly/lx_simple_gxy.sh
@@ -0,0 +1,104 @@
+#!/bin/sh
+#
+unalias -a
+export LC_COLLATE=C
+# fix path for python
+export PATH="/usr/bin:$PATH"
+###########################################################
+## calc lx, based on `calc_lx', modified for gxy ##
+## ##
+## LIweitiaNux <liweitianux@gmail.com> ##
+## November 3, 2012 ##
+###########################################################
+
+## usage: <mass_dir> is the `repro/mass' directory to process;
+## an optional second argument names a separate log file for the results
+if [ $# -eq 1 ]; then
+ # process dir
+ MDIR="$1"
+elif [ $# -eq 2 ]; then
+ # process dir
+ MDIR="$1"
+ # separate log file
+ MAIN_LOG="`pwd -P`/`basename $2`"
+else
+ printf "usage:\n"
+ printf " `basename $0` <mass_dir> [ <logfile> ]\n"
+ printf "\nNOTE:\n"
+ printf " script cannot handle \`~' in path\n"
+ exit 1
+fi
+
+if [ ! -d "${MDIR}" ]; then
+    printf "## ERROR: given \`${MDIR}' is not a directory\n"
+ exit 2
+fi
+if [ -d "${MAIN_LOG}" ]; then
+    printf "## ERROR: given \`${MAIN_LOG}' is a directory, not a file\n"
+ exit 3
+fi
+
+## set the path to the script {{{
+if [ "$0" = `basename $0` ]; then
+ script_path=`which $0`
+ base_path=`dirname ${script_path}`
+else
+ base_path=`dirname $0`
+fi
+LX1_SCRIPT="$base_path/luminosity_0.1-2.4_lwt.sh"
+LX2_SCRIPT="$base_path/luminosity_0.5-2_lwt.sh"
+if [ ! -e "${LX1_SCRIPT}" ] || [ ! -e "${LX2_SCRIPT}" ]; then
+    printf "ERROR: \`LX1_SCRIPT' or \`LX2_SCRIPT' does not exist\n"
+ exit 250
+fi
+## script path }}}
+
+# number of trailing log lines to search for the fitted results
+RES_LINE=100
+
+# process
+cd ${MDIR}
+printf "Entered dir: \``pwd`'\n"
+# mass fitting conf
+MCONF="`ls fitting_mass.conf global.cfg 2> /dev/null | head -n 1`"
+if [ ! -r "${MCONF}" ]; then
+    printf "ERROR: main configuration file not accessible\n"
+ exit 10
+else
+ printf "## use main configuration file: \`${MCONF}'\n"
+ LOGFILE="lx_gxy_`date '+%Y%m%d%H'`.log"
+ [ -e "${LOGFILE}" ] && mv -fv ${LOGFILE} ${LOGFILE}_bak
+ TOLOG="tee -a ${LOGFILE}"
+ printf "## use logfile: \`${LOGFILE}'\n"
+ if [ ! -z "${MAIN_LOG}" ]; then
+ printf "## separate main logfile: \`${MAIN_LOG}'\n"
+ fi
+ printf "## working directory: \``pwd -P`'\n" | ${TOLOG}
+ printf "## use configuration files: \`${MCONF}'\n" | ${TOLOG}
+    # fitting_mass logfile; get R500 and R200 from it
+ MLOG=`ls ${MCONF%.[confgCONFG]*}*.log | tail -n 1`
+ if [ ! -r "${MLOG}" ]; then
+        printf "## ERROR: mass log file not accessible\n"
+ exit 20
+ fi
+ R500_VAL=`tail -n ${RES_LINE} ${MLOG} | grep '^r500' | head -n 1 | awk '{ print $2 }'`
+ R200_VAL=`tail -n ${RES_LINE} ${MLOG} | grep '^r200' | head -n 1 | awk '{ print $2 }'`
+ if [ -z "${R500_VAL}" ] || [ -z "${R200_VAL}" ]; then
+ printf "## ERROR: cannot get R500 or R200\n"
+ exit 30
+ fi
+ printf "## R200 (kpc): \`${R200_VAL}'\n" | ${TOLOG}
+ printf "## R500 (kpc): \`${R500_VAL}'\n" | ${TOLOG}
+ printf "## CMD: ${LX1_SCRIPT} ${MCONF} ${R200_VAL} ${R500_VAL}\n" | ${TOLOG}
+# printf "## CMD: ${LX2_SCRIPT} ${MCONF} ${R200_VAL} ${R500_VAL}\n" | ${TOLOG}
+ sh ${LX1_SCRIPT} ${MCONF} ${R200_VAL} ${R500_VAL} | ${TOLOG}
+# sh ${LX2_SCRIPT} ${MCONF} ${R200_VAL} ${R500_VAL} | ${TOLOG}
+ ## append results to main log file
+ if [ ! -z "${MAIN_LOG}" ]; then
+ printf "\n" >> ${MAIN_LOG}
+ printf "`pwd -P`\n" >> ${MAIN_LOG}
+ grep '^L[25]00' ${LOGFILE} >> ${MAIN_LOG}
+ fi
+fi
+printf "\n++++++++++++++++++++++++++++++++++++++\n"
+
diff --git a/mod_ly/lx_simple_gxy_rs.sh b/mod_ly/lx_simple_gxy_rs.sh
new file mode 100755
index 0000000..e424f4c
--- /dev/null
+++ b/mod_ly/lx_simple_gxy_rs.sh
@@ -0,0 +1,104 @@
+#!/bin/sh
+#
+unalias -a
+export LC_COLLATE=C
+# fix path for python
+export PATH="/usr/bin:$PATH"
+###########################################################
+## calc lx, based on `calc_lx', modified for gxy ##
+## ##
+## LIweitiaNux <liweitianux@gmail.com> ##
+## November 3, 2012 ##
+###########################################################
+
+## usage: <mass_dir> is the `repro/mass' directory to process;
+## an optional second argument names a separate log file for the results
+if [ $# -eq 1 ]; then
+ # process dir
+ MDIR="$1"
+elif [ $# -eq 2 ]; then
+ # process dir
+ MDIR="$1"
+ # separate log file
+ MAIN_LOG="`pwd -P`/`basename $2`"
+else
+ printf "usage:\n"
+ printf " `basename $0` <mass_dir> [ <logfile> ]\n"
+ printf "\nNOTE:\n"
+ printf " script cannot handle \`~' in path\n"
+ exit 1
+fi
+
+if [ ! -d "${MDIR}" ]; then
+    printf "## ERROR: given \`${MDIR}' is not a directory\n"
+ exit 2
+fi
+if [ -d "${MAIN_LOG}" ]; then
+    printf "## ERROR: given \`${MAIN_LOG}' is a directory, not a file\n"
+ exit 3
+fi
+
+## set the path to the script {{{
+if [ "$0" = `basename $0` ]; then
+ script_path=`which $0`
+ base_path=`dirname ${script_path}`
+else
+ base_path=`dirname $0`
+fi
+LX1_SCRIPT="$base_path/luminosity_0.1-2.4_lwt_rs.sh"
+LX2_SCRIPT="$base_path/luminosity_0.5-2_lwt_rs.sh"
+if [ ! -e "${LX1_SCRIPT}" ] || [ ! -e "${LX2_SCRIPT}" ]; then
+    printf "ERROR: \`LX1_SCRIPT' or \`LX2_SCRIPT' does not exist\n"
+ exit 250
+fi
+## script path }}}
+
+# number of trailing log lines to search for the fitted results
+RES_LINE=100
+
+# process
+cd ${MDIR}
+printf "Entered dir: \``pwd`'\n"
+# mass fitting conf
+MCONF="`ls fitting_mass.conf global.cfg 2> /dev/null | head -n 1`"
+if [ ! -r "${MCONF}" ]; then
+    printf "ERROR: main configuration file not accessible\n"
+ exit 10
+else
+ printf "## use main configuration file: \`${MCONF}'\n"
+ LOGFILE="lx_gxy_`date '+%Y%m%d%H'`.log"
+ [ -e "${LOGFILE}" ] && mv -fv ${LOGFILE} ${LOGFILE}_bak
+ TOLOG="tee -a ${LOGFILE}"
+ printf "## use logfile: \`${LOGFILE}'\n"
+ if [ ! -z "${MAIN_LOG}" ]; then
+ printf "## separate main logfile: \`${MAIN_LOG}'\n"
+ fi
+ printf "## working directory: \``pwd -P`'\n" | ${TOLOG}
+ printf "## use configuration files: \`${MCONF}'\n" | ${TOLOG}
+    # fitting_mass logfile; get R500 and R200 from it
+ MLOG=`ls ${MCONF%.[confgCONFG]*}*.log | tail -n 1`
+ if [ ! -r "${MLOG}" ]; then
+        printf "## ERROR: mass log file not accessible\n"
+ exit 20
+ fi
+ R500_VAL=`tail -n ${RES_LINE} ${MLOG} | grep '^r500' | head -n 1 | awk '{ print $2 }'`
+ R200_VAL=`tail -n ${RES_LINE} ${MLOG} | grep '^r200' | head -n 1 | awk '{ print $2 }'`
+ if [ -z "${R500_VAL}" ] || [ -z "${R200_VAL}" ]; then
+ printf "## ERROR: cannot get R500 or R200\n"
+ exit 30
+ fi
+ printf "## R200 (kpc): \`${R200_VAL}'\n" | ${TOLOG}
+ printf "## R500 (kpc): \`${R500_VAL}'\n" | ${TOLOG}
+ printf "## CMD: ${LX1_SCRIPT} ${MCONF} ${R200_VAL} ${R500_VAL}\n" | ${TOLOG}
+# printf "## CMD: ${LX2_SCRIPT} ${MCONF} ${R200_VAL} ${R500_VAL}\n" | ${TOLOG}
+ sh ${LX1_SCRIPT} ${MCONF} ${R200_VAL} ${R500_VAL} | ${TOLOG}
+# sh ${LX2_SCRIPT} ${MCONF} ${R200_VAL} ${R500_VAL} | ${TOLOG}
+ ## append results to main log file
+ if [ ! -z "${MAIN_LOG}" ]; then
+ printf "\n" >> ${MAIN_LOG}
+ printf "`pwd -P`\n" >> ${MAIN_LOG}
+ grep '^L[25]00' ${LOGFILE} >> ${MAIN_LOG}
+ fi
+fi
+printf "\n++++++++++++++++++++++++++++++++++++++\n"
+
diff --git a/mod_ly/shuffle_T.py b/mod_ly/shuffle_T.py
new file mode 100755
index 0000000..24e7393
--- /dev/null
+++ b/mod_ly/shuffle_T.py
@@ -0,0 +1,18 @@
+#!/usr/bin/python
+
+import sys
+import scipy
+
+output_file=open(sys.argv[2],'w')
+for i in open(sys.argv[1]):
+ r,re,c,s=i.strip().split()
+ c=float(c)
+ s=float(s)
+
+ if c>0 and s>0:
+ c1=-1
+ while c1<=0:
+ c1=scipy.random.normal(0,1)*s+c
+
+ output_file.write("%s\t%s\t%s\t%s\n"%(r,re,c1,s))
+
diff --git a/mod_ly/shuffle_sbp.py b/mod_ly/shuffle_sbp.py
new file mode 100755
index 0000000..210bfb8
--- /dev/null
+++ b/mod_ly/shuffle_sbp.py
@@ -0,0 +1,18 @@
+#!/usr/bin/python
+
+import sys
+import scipy
+
+output_file=open(sys.argv[2],'w')
+for i in open(sys.argv[1]):
+ c,s=i.strip().split()
+ c=float(c)
+ s=float(s)
+
+ if c>0 and s>0:
+ c1=-1
+ while c1<=0:
+ c1=scipy.random.normal(0,1)*s+c
+
+ output_file.write("%s\t%s\n"%(c1,s))
+
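A minimal sketch (Python with NumPy, not part of this commit, illustrative numbers) of what the two shuffle helpers above do: each measured value is replaced by a Gaussian draw centred on the value with its quoted error as the standard deviation, redrawing until the sample is positive.

    import numpy as np

    rng = np.random.default_rng()

    def shuffle_value(value, err):
        # Redraw from N(value, err) until positive, as in shuffle_T.py / shuffle_sbp.py.
        sample = -1.0
        while sample <= 0.0:
            sample = rng.normal(loc=value, scale=err)
        return sample

    # e.g. perturb a surface-brightness point of 3.2 +/- 0.5 (arbitrary units)
    print(shuffle_value(3.2, 0.5))
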
diff --git a/opt_utilities b/opt_utilities
new file mode 160000
+Subproject 844950c94bfa87132bef00a698c1ba485602a13
diff --git a/scripts/calc_lx_simple_v2.sh b/scripts/calc_lx_simple_v2.sh
new file mode 100755
index 0000000..576a43e
--- /dev/null
+++ b/scripts/calc_lx_simple_v2.sh
@@ -0,0 +1,127 @@
+#!/bin/sh
+#
+unalias -a
+export LC_COLLATE=C
+# fix path for python
+export PATH="/usr/bin:$PATH"
+###########################################################
+## calc_lx in BATCH mode ##
+## ##
+## LIweitiaNux <liweitianux@gmail.com> ##
+## August 31, 2012 ##
+## ##
+## ChangeLog: ##
+## 2014/06/18: use env variable 'MASS_PROFILE_DIR' ##
+###########################################################
+
+## usage: <mass_dir> is the `repro/mass' directory to process;
+## it must contain the `fitting_mass.conf' configuration file
+if [ $# -ne 1 ]; then
+ printf "usage:\n"
+ printf " `basename $0` <mass_dir>\n"
+ printf "\nNOTE:\n"
+ printf " script cannot handle \`~' in path\n"
+ exit 1
+fi
+
+## set the path to the script {{{
+if [ -n "${MASS_PROFILE_DIR}" ]; then
+ CALCLX_SCRIPT="${MASS_PROFILE_DIR}/calc_lx"
+ CFCALC_SCRIPT="${MASS_PROFILE_DIR}/coolfunc_calc.sh"
+ FIT_TPRO="${MASS_PROFILE_DIR}/fit_wang2012_model"
+else
+ printf "ERROR: environment variable 'MASS_PROFILE_DIR' not set.\n"
+ exit 2
+fi
+
+if [ -z "${CALCLX_SCRIPT}" ]; then
+ printf "ERROR: \`CALCLX_SCRIPT' not set\n"
+ exit 250
+elif [ ! -r ${CALCLX_SCRIPT} ]; then
+ printf "ERROR: CANNOT access script \`${CALCLX_SCRIPT}'\n"
+ exit 251
+fi
+## script path }}}
+
+# number of trailing log lines to search for the fitted results
+RES_LINE=100
+# process dir
+MDIR="$1"
+# mass fitting conf
+MCONF="fitting_mass.conf"
+
+# process
+cd ${MDIR}
+printf "Entered dir: \``pwd`'\n"
+# conf file
+if [ ! -r "${MCONF}" ]; then
+    printf "ERROR: configuration file \`${MCONF}' not accessible\n"
+else
+ LOGFILE="calclx_`date '+%Y%m%d%H'`.log"
+ [ -e "${LOGFILE}" ] && mv -fv ${LOGFILE} ${LOGFILE}_bak
+ TOLOG="tee -a ${LOGFILE}"
+ # fitting_mass logfile, get R500 from it
+ MLOG=`ls ${MCONF%.[confgCONFG]*}*.log | tail -n 1`
+ R500_VAL=`tail -n ${RES_LINE} ${MLOG} | grep '^r500' | awk '{ print $2 }'`
+ R200_VAL=`tail -n ${RES_LINE} ${MLOG} | grep '^r200' | awk '{ print $2 }'`
+ # radius_sbp_file {{{
+ RSBP=`grep '^radius_sbp_file' ${MCONF} | awk '{ print $2 }'`
+ TMP_RSBP="_tmp_rsbp.txt"
+ [ -e "${TMP_RSBP}" ] && rm -f ${TMP_RSBP}
+ cat ${RSBP} | sed 's/#.*$//' | grep -Ev '^\s*$' > ${TMP_RSBP}
+ RSBP="${TMP_RSBP}"
+ # rsbp }}}
+ TPRO_TYPE=`grep '^t_profile' ${MCONF} | awk '{ print $2 }'`
+ TPRO_DATA=`grep '^t_data_file' ${MCONF} | awk '{ print $2 }'`
+ TPRO_PARA=`grep '^t_param_file' ${MCONF} | awk '{ print $2 }'`
+ SBP_CONF=`grep '^sbp_cfg' ${MCONF} | awk '{ print $2 }'`
+ ABUND=`grep '^abund' ${MCONF} | awk '{ print $2 }'`
+ NH=`grep '^nh' ${MCONF} | awk '{ print $2 }'`
+ Z=`grep '^z' ${SBP_CONF} | awk '{ print $2 }'`
+ cm_per_pixel=`grep '^cm_per_pixel' ${SBP_CONF} | awk '{ print $2 }'`
+ CF_FILE=`grep '^cfunc_file' ${SBP_CONF} | awk '{ print $2 }'`
+ printf "## use logfile: \`${LOGFILE}'\n"
+ printf "## working directory: \``pwd -P`'\n" | ${TOLOG}
+ printf "## use configuration files: \`${MCONF}, ${SBP_CONF}'\n" | ${TOLOG}
+ printf "## use radius_sbp_file: \`${RSBP}'\n" | ${TOLOG}
+ printf "## R500 (kpc): \`${R500_VAL}'\n" | ${TOLOG}
+ printf "## R200 (kpc): \`${R200_VAL}'\n" | ${TOLOG}
+ printf "## redshift: \`${Z}'\n" | ${TOLOG}
+ printf "## abund: \`${ABUND}'\n" | ${TOLOG}
+ printf "## nh: \`${NH}'\n" | ${TOLOG}
+ printf "## T_profile type: \`${TPRO_TYPE}'\n" | ${TOLOG}
+ printf "## cfunc_file: \`${CF_FILE}'\n" | ${TOLOG}
+ ## fit temperature profile {{{
+ T_FILE="_tpro_dump.qdp"
+ if [ "${TPRO_TYPE}" = "wang2012" ]; then
+ printf "fitting temperature profile (wang2012) ...\n"
+ [ -e "wang2012_dump.qdp" ] && mv -fv wang2012_dump.qdp wang2012_dump.qdp_bak
+ [ -e "fit_result.qdp" ] && mv -fv fit_result.qdp fit_result.qdp_bak
+ ${FIT_TPRO} ${TPRO_DATA} ${TPRO_PARA} ${cm_per_pixel} 2> /dev/null
+ mv -fv wang2012_dump.qdp ${T_FILE}
+ [ -e "wang2012_dump.qdp_bak" ] && mv -fv wang2012_dump.qdp_bak wang2012_dump.qdp
+ [ -e "fit_result.qdp_bak" ] && mv -fv fit_result.qdp_bak fit_result.qdp
+ else
+ printf "ERROR: invalid tprofile type: \`${TPRO_TYPE}'\n"
+ exit 10
+ fi
+ ## tprofile }}}
+ ## calc `flux_ratio' {{{
+ printf "calc flux_ratio ...\n"
+ CF_FILE="_cf_data.txt"
+ FLUX_RATIO="__flux_cnt_ratio.txt"
+ [ -e "flux_cnt_ratio.txt" ] && mv -fv flux_cnt_ratio.txt flux_cnt_ratio.txt_bak
+ printf "## CMD: sh ${CFCALC_SCRIPT} ${T_FILE} ${ABUND} ${NH} ${Z} ${CF_FILE}\n" | ${TOLOG}
+ sh ${CFCALC_SCRIPT} ${T_FILE} ${ABUND} ${NH} ${Z} ${CF_FILE}
+ mv -fv flux_cnt_ratio.txt ${FLUX_RATIO}
+ [ -e "flux_cnt_ratio.txt_bak" ] && mv -fv flux_cnt_ratio.txt_bak flux_cnt_ratio.txt
+ ## flux_ratio }}}
+ printf "## CMD: ${CALCLX_SCRIPT} ${RSBP} ${FLUX_RATIO} ${Z} ${R500_VAL} ${TPRO_DATA}\n" | ${TOLOG}
+ printf "## CMD: ${CALCLX_SCRIPT} ${RSBP} ${FLUX_RATIO} ${Z} ${R200_VAL} ${TPRO_DATA}\n" | ${TOLOG}
+ L500=`${CALCLX_SCRIPT} ${RSBP} ${FLUX_RATIO} ${Z} ${R500_VAL} ${TPRO_DATA} | grep '^Lx' | awk '{ print $2,$3,$4 }'`
+ L200=`${CALCLX_SCRIPT} ${RSBP} ${FLUX_RATIO} ${Z} ${R200_VAL} ${TPRO_DATA} | grep '^Lx' | awk '{ print $2,$3,$4 }'`
+ printf "L500= ${L500} erg/s\n" | ${TOLOG}
+ printf "L200= ${L200} erg/s\n" | ${TOLOG}
+fi
+printf "\n++++++++++++++++++++++++++++++++++++++\n"
+
diff --git a/scripts/chandra_bkg_rescale.sh b/scripts/chandra_bkg_rescale.sh
new file mode 100755
index 0000000..ae13af1
--- /dev/null
+++ b/scripts/chandra_bkg_rescale.sh
@@ -0,0 +1,54 @@
+#!/bin/sh
+#
+# background rescale, by adjusting `BACKSCAL'
+# according to the photon flux values in `9.5-12.0 keV'
+#
+# LIweitiaNux <liweitianux@gmail.com>
+# August 14, 2012
+
+## background rescale (BACKSCAL) {{{
+# rescale background according to particle background
+# energy range: 9.5-12.0 keV (channel: 651-822)
+CH_LOW=651
+CH_HI=822
+pb_flux() {
+ punlearn dmstat
+ COUNTS=`dmstat "$1[channel=${CH_LOW}:${CH_HI}][cols COUNTS]" | grep -i 'sum:' | awk '{ print $2 }'`
+ punlearn dmkeypar
+ EXPTIME=`dmkeypar $1 EXPOSURE echo=yes`
+ BACK=`dmkeypar $1 BACKSCAL echo=yes`
+ # fix `scientific notation' bug for `bc'
+ EXPTIME_B=`echo ${EXPTIME} | sed 's/[eE]/\*10\^/' | sed 's/+//'`
+ BACK_B=`echo "( ${BACK} )" | sed 's/[eE]/\*10\^/' | sed 's/+//'`
+ PB_FLUX=`echo "scale = 16; ${COUNTS} / ${EXPTIME_B} / ${BACK_B}" | bc -l`
+ echo ${PB_FLUX}
+}
+
+bkg_rescale() {
+ # $1: src spectrum, $2: back spectrum
+ PBFLUX_SRC=`pb_flux $1`
+ PBFLUX_BKG=`pb_flux $2`
+ BACK_OLD=`dmkeypar $2 BACKSCAL echo=yes`
+ BACK_OLD_B=`echo "( ${BACK_OLD} )" | sed 's/[eE]/\*10\^/' | sed 's/+//'`
+ BACK_NEW=`echo "scale = 16; ${BACK_OLD_B} * ${PBFLUX_BKG} / ${PBFLUX_SRC}" | bc -l`
+ printf "\`$2': BACKSCAL:\n"
+ printf " ${BACK_OLD} --> ${BACK_NEW}\n"
+ punlearn dmhedit
+ dmhedit infile=$2 filelist=none operation=add \
+ key=BACKSCAL value=${BACK_NEW} comment="old value: ${BACK_OLD}"
+}
+## bkg rescale }}}
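+# Illustrative example of the rescaling above (numbers are made up):
+#   PBFLUX_SRC = 2.0e-9, PBFLUX_BKG = 3.0e-9  (counts / EXPOSURE / BACKSCAL)
+#   => BACKSCAL_new = BACKSCAL_old * 3.0e-9 / 2.0e-9 = 1.5 * BACKSCAL_old
+# A larger BACKSCAL in the background spectrum lowers its weight when the
+# spectrum is later background-subtracted (e.g. in XSPEC).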
+
+if [ $# -ne 2 ]; then
+ printf "usage:\n"
+ printf " `basename $0` <src_spec> <bkg_spec>\n"
+ printf "\nNOTE:\n"
+ printf "<bkg_spec> is the spectrum to be adjusted\n"
+ exit 1
+fi
+
+# perform `bkg_rescale'
+bkg_rescale "$1" "$2"
+
+exit 0
+
diff --git a/scripts/chandra_ccdgap_rect.py b/scripts/chandra_ccdgap_rect.py
new file mode 100755
index 0000000..60bcdac
--- /dev/null
+++ b/scripts/chandra_ccdgap_rect.py
@@ -0,0 +1,80 @@
+#!/usr/bin/python
+
+
+import sys
+import math
+import re
+ccd_size=1024
+
+def find_intersection(v1,v2):
+ x1=v1[0][0]
+ y1=v1[0][1]
+ x2=v1[1][0]
+ y2=v1[1][1]
+ x3=v2[0][0]
+ y3=v2[0][1]
+ x4=v2[1][0]
+ y4=v2[1][1]
+
+ k=((x3-x1)*(y3-y4)-(y3-y1)*(x3-x4))/((x2-x1)*(y3-y4)-(y2-y1)*(x3-x4))
+ return (x1+k*(x2-x1),y1+k*(y2-y1))
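+# quick sanity check of find_intersection() with two hypothetical segments:
+#   v1 = ((0.0, 0.0), (2.0, 0.0))   # along the x-axis
+#   v2 = ((1.0, -1.0), (1.0, 1.0))  # vertical line at x = 1
+#   find_intersection(v1, v2)  ->  (1.0, 0.0)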
+
+def parse_poly(s):
+ p=s.split('(')[1].split(')')[0]
+ p=p.split(',')
+ vertex=[]
+ for i in range(0,int(len(p)/2)):
+ x,y=float(p[i*2]),float(p[i*2+1])
+ vertex.append((x,y))
+
+ vlist=[]
+ for i in range(0,len(vertex)):
+ n=i%(len(vertex))
+ n1=(i+1)%(len(vertex))
+ v=(vertex[n1][0]-vertex[n][0],
+ vertex[n1][1]-vertex[n][1])
+ l=(math.sqrt(v[0]**2+v[1]**2))
+ if l>ccd_size*.66:
+ vlist.append((vertex[n],vertex[n1]))
+ result=[]
+ for i in range(0,len(vlist)):
+ n=i%len(vlist)
+ n1=(i+1)%len(vlist)
+ v1=vlist[n]
+ v2=vlist[n1]
+ point=find_intersection(v1,v2)
+ result.append(point)
+ return result
+
+def form_poly(plist):
+ result="Polygon("
+ for i in range(0,len(plist)-1):
+ result+="%f,%f,"%(plist[i][0],plist[i][1])
+ result+="%f,%f)"%(plist[-1][0],plist[-1][1])
+ return result
+
+def poly2rect(plist):
+ c=[0,0]
+ if len(plist)!=4:
+ raise Exception("Error, the length of poly point list should be 4!")
+ for i in range(0,4):
+ c[0]+=plist[i][0]/4.
+ c[1]+=plist[i][1]/4.
+ w=0
+ for i in range(0,4):
+ n=i%4
+ n1=(i+1)%4
+ l=math.sqrt((plist[n][0]-plist[n1][0])**2+(plist[n][1]-plist[n1][1])**2)
+ w+=l/4
+ a=math.degrees(math.atan2(plist[1][1]-plist[0][1],plist[1][0]-plist[0][0]))
+ return "rotbox(%f,%f,%f,%f,%f)"%(c[0],c[1],w,w,a)
+
+if __name__=='__main__':
+ if len(sys.argv)!=2:
+ print("Usage:%s %s"%(sys.argv[0]," <input regfile, including only polygens>"))
+ sys.exit()
+ for i in open(sys.argv[1]):
+ if re.match('.*olygon',i):
+ reg=poly2rect(parse_poly(i))
+ print(reg)
+
diff --git a/scripts/chandra_collect_data_v3.sh b/scripts/chandra_collect_data_v3.sh
new file mode 100755
index 0000000..a1b56d8
--- /dev/null
+++ b/scripts/chandra_collect_data_v3.sh
@@ -0,0 +1,616 @@
+#!/bin/sh
+##
+unalias -a
+export LC_COLLATE=C
+# fix path for python
+export PATH=/usr/bin:$PATH
+#########################################################################
+## collect needed data and save to a JSON file
+##
+## LIweitiaNux <liweitianux@gmail.com>
+## August 31, 2012
+##
+## ChangeLogs:
+## check JSON syntax, modify output to agree with the syntax
+## Ref: http://json.parser.online.fr/
+## v1.1, 2012/09/05, LIweitiaNux
+## add `T_avg(0.2-0.5 R500)' and `T_err'
+## v2.0, 2012/10/14, LIweitiaNux
+## add parameters
+## v2.1, 2012/11/07, LIweitiaNux
+## account for `fitting_dbeta'
+## v2.2, 2012/12/18, LIweitiaNux
+## add `beta' and `cooling time' parameters
+## v3.0, 2013/02/09, LIweitiaNux
+## modified for `new sample info format'
+## v3.1, 2013/05/18, LIweitiaNux
+## add key `Feature'
+## v3.2, 2013/05/29, LIweitiaNux
+## add key `XCNTRD_RA, XCNTRD_DEC'
+## v3.3, 2013/10/14, LIweitiaNux
+## add key `Unified Name'
+#########################################################################
+
+## about, used in `usage' {{{
+VERSION="v3.0"
+UPDATE="2013-02-09"
+## about }}}
+
+## error code {{{
+ERR_USG=1
+ERR_DIR=11
+ERR_CFG=12
+ERR_RES=13
+ERR_COSC=14
+ERR_BETA=101
+ERR_BETA2=102
+ERR_JSON=201
+ERR_MLOG=202
+ERR_BLOG=203
+ERR_CLOG=204
+## error code }}}
+
+## usage, help {{{
+case "$1" in
+ -[hH]*|--[hH]*)
+ printf "usage:\n"
+ printf " `basename $0` json=<info_json> cfg=<main_cfg> res=<final_result> basedir=<repro_dir> massdir=<mass_dir>\n"
+ printf "\nversion:\n"
+ printf "${VERSION}, ${UPDATE}\n"
+ exit ${ERR_USG}
+ ;;
+esac
+## usage, help }}}
+
+## default parameters {{{
+# default basedir
+DFT_BASEDIR=".."
+# default mass dir
+DFT_MASSDIR="mass"
+# default pattern for json info file
+DFT_JSON_PAT="*_INFO.json"
+# main config file
+DFT_CFG_PAT="global.cfg"
+# default result file
+DFT_RES_FINAL_PAT="final_result.txt"
+# default sbprofile region file
+DFT_SBPROFILE_REG="sbprofile.reg"
+# default radial spectra region file
+DFT_RSPEC_REG="rspec.reg"
+# default CMD to determine the `spc/profile' dir
+DFT_SPC_DIR_CMD='dirname `readlink ${T_DATA_FILE}`'
+# default CMD to determine the `img' dir
+DFT_IMG_DIR_CMD='dirname `readlink ${RADIUS_SBP_FILE}`'
+# default config file pattern for `expcorr'
+DFT_EXPCORR_CONF_PAT="*_expcorr.conf"
+## default parameters }}}
+
+## functions {{{
+# process commandline arguments
+# cmdline arg format: `KEY=VALUE'
+getopt_keyval() {
+ until [ -z "$1" ]
+ do
+ key=${1%%=*} # extract key
+ val=${1#*=} # extract value
+ keyval="${key}=\"${val}\""
+ echo "## getopt: eval '${keyval}'"
+ eval ${keyval}
+ shift # shift, process next one
+ done
+}
+## functions }}}
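+# e.g. a hypothetical call `getopt_keyval "json=foo_INFO.json" "basedir=.."'
+# leaves $json set to "foo_INFO.json" and $basedir set to ".."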
+
+## parameters {{{
+# process cmdline args using `getopt_keyval'
+getopt_keyval "$@"
+
+# init directory
+INIT_DIR=`pwd -P`
+
+# check given parameters
+# check given dir
+if [ -d "${basedir}" ] && ls ${basedir}/*repro_evt2.fits > /dev/null 2>&1; then
+ BASEDIR=${basedir}
+elif [ -d "${DFT_BASEDIR}" ] && ls ${DFT_BASEDIR}/*repro_evt2.fits > /dev/null 2>&1; then
+ BASEDIR=${DFT_BASEDIR}
+else
+ read -p "> basedir (contains info json): " BASEDIR
+ if [ ! -d "${BASEDIR}" ]; then
+ printf "ERROR: given \`${BASEDIR}' NOT a directory\n"
+ exit ${ERR_DIR}
+ elif ! ls ${BASEDIR}/*repro_evt2.fits > /dev/null 2>&1; then
+ printf "ERROR: given \`${BASEDIR}' NOT contains needed evt files\n"
+ exit ${ERR_DIR}
+ fi
+fi
+# remove the trailing '/'
+BASEDIR=`( cd ${INIT_DIR}/${BASEDIR} && pwd -P )`
+printf "## use basedir: \`${BASEDIR}'\n"
+# mass dir
+if [ ! -z "${massdir}" ] && [ -d "${BASEDIR}/${massdir}" ]; then
+ MASS_DIR=`( cd ${BASEDIR}/${massdir} && pwd -P )`
+elif [ -d "${BASEDIR}/${DFT_MASSDIR}" ]; then
+ MASS_DIR=`( cd ${BASEDIR}/${DFT_MASSDIR} && pwd -P )`
+else
+ read -p "> mass dir (relative to basedir): " MASS_DIR
+ if [ ! -d "${BASEDIR}/${MASS_DIR}" ]; then
+ printf "ERROR: given \`${BASEDIR}/${MASS_DIR}' NOT a directory\n"
+ exit ${ERR_DIR}
+ fi
+fi
+# remove the trailing '/'
+MASS_DIR=`echo ${MASS_DIR} | sed 's/\/*$//'`
+printf "## use massdir: \`${MASS_DIR}'\n"
+# check json file
+if [ ! -z "${json}" ] && [ -r "${BASEDIR}/${json}" ]; then
+ JSON_FILE="${json}"
+elif [ "`ls ${BASEDIR}/${DFT_JSON_PAT} | wc -l`" -eq 1 ]; then
+ JSON_FILE=`( cd ${BASEDIR} && ls ${DFT_JSON_PAT} )`
+else
+ read -p "> info json file: " JSON_FILE
+ if ! [ -r "${BASEDIR}/${JSON_FILE}" ]; then
+ printf "ERROR: cannot access given \`${BASEDIR}/${INFO_JSON}' file\n"
+ exit ${ERR_JSON}
+ fi
+fi
+printf "## use info json file: \`${JSON_FILE}'\n"
+# check main config file
+if [ ! -z "${cfg}" ] && [ -r "${cfg}" ]; then
+ CFG_FILE="${cfg}"
+elif [ -r "${DFT_CFG_PAT}" ]; then
+ CFG_FILE="${DFT_CFG_PAT}"
+else
+ read -p "> main config file: " CFG_FILE
+ if [ ! -r "${CFG_FILE}" ]; then
+ printf "ERROR: cannot access given \`${CFG_JSON}' file\n"
+ exit ${ERR_CFG}
+ fi
+fi
+printf "## use main config file: \`${CFG_FILE}'\n"
+SBP_CFG=`grep '^sbp_cfg' ${CFG_FILE} | awk '{ print $2 }'`
+printf "## sbp config file: \`${SBP_CFG}'\n"
+# check final result file
+if [ ! -z "${res}" ] && [ -r "${res}" ]; then
+ RES_FINAL="${res}"
+elif [ -r "${DFT_RES_FINAL_PAT}" ]; then
+ RES_FINAL="${DFT_RES_FINAL_PAT}"
+else
+ read -p "> final result file: " RES_FINAL
+ if [ ! -r "${RES_FINAL}" ]; then
+ printf "ERROR: cannot access given \`${RES_FINAL}' file\n"
+ exit ${ERR_RES}
+ fi
+fi
+printf "## use final result file: \`${RES_FINAL}'\n"
+## parameters }}}
+
+## directory & file {{{
+BASE_PATH=`dirname $0`
+printf "## BASE_PATH: ${BASE_PATH}\n"
+COSMO_CALC="${BASE_PATH}/cosmo_calc"
+if [ ! -x "${COSMO_CALC}" ]; then
+ printf "*** ERROR: \`${COSMO_CALC}' not executable\n"
+ exit ${ERR_COSC}
+fi
+EVT_DIR="${BASEDIR}/evt"
+IMG_DIR="${BASEDIR}/img"
+SPEC_DIR="${BASEDIR}/spc/profile"
+## dir & file }}}
+
+cd ${BASEDIR}
+printf "## enter directory: `pwd -P`\n"
+
+## in dir `repro' {{{
+punlearn dmkeypar
+EVT_RAW=`ls *repro_evt2.fits`
+OBS_ID=`dmkeypar ${EVT_RAW} OBS_ID echo=yes`
+DATE_OBS=`dmkeypar ${EVT_RAW} DATE-OBS echo=yes`
+EXPOSURE_RAW=`dmkeypar ${EVT_RAW} EXPOSURE echo=yes | awk '{ print $1/1000 }'`
+## ACIS_TYPE
+DETNAM=`dmkeypar ${EVT_RAW} DETNAM echo=yes`
+if echo ${DETNAM} | grep -q 'ACIS-0123'; then
+ ACIS_TYPE="ACIS-I"
+elif echo ${DETNAM} | grep -q 'ACIS-[0-6]*7'; then
+ ACIS_TYPE="ACIS-S"
+else
+ printf "*** ERROR: unknown detector type: ${DETNAM}\n"
+ ACIS_TYPE="UNKNOWN"
+fi
+## dir `repro' }}}
+
+## in dir `repro/evt' {{{
+cd ${EVT_DIR}
+EVT_CLEAN=`ls evt2_c*_clean.fits`
+EXPOSURE_CLEAN=`dmkeypar ${EVT_CLEAN} EXPOSURE echo=yes | awk '{ print $1/1000 }'`
+## dir `repro/evt' }}}
+
+## in dir `repro/mass' {{{
+cd ${MASS_DIR}
+
+# misc {{{
+N_H=`grep '^nh' ${CFG_FILE} | awk '{ print $2 }'`
+Z=`grep '^z' ${SBP_CFG} | awk '{ print $2 }'`
+T_DATA_FILE=`grep '^t_data_file' ${CFG_FILE} | awk '{ print $2 }'`
+NFW_RMIN_KPC=`grep '^nfw_rmin_kpc' ${CFG_FILE} | awk '{ print $2 }'`
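+# E(z) = H(z)/H0 (dimensionless); assumed to be the 3rd field of the
+# `Hubble_parameter' line printed by cosmo_calc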
+E_Z=`${COSMO_CALC} ${Z} | grep -i 'Hubble_parameter' | awk '{ print $3 }'`
+KPC_PER_PIXEL=`${COSMO_CALC} ${Z} | grep 'kpc/pixel' | awk '{ print $3 }'`
+RADIUS_SBP_FILE=`grep '^radius_sbp_file' ${CFG_FILE} | awk '{ print $2 }'`
+RMAX_SBP_PIX=`tail -n 1 ${RADIUS_SBP_FILE} | awk '{ print $1+$2 }'`
+RMAX_SBP_KPC=`echo "${RMAX_SBP_PIX} ${KPC_PER_PIXEL}" | awk '{ printf("%.2f", $1*$2) }'`
+SPC_DIR=`eval ${DFT_SPC_DIR_CMD}`
+if [ -f "${SPC_DIR}/${DFT_RSPEC_REG}" ]; then
+ RMAX_TPRO_PIX=`grep -iE '(pie|annulus)' ${SPC_DIR}/${DFT_RSPEC_REG} | tail -n 1 | awk -F',' '{ print $4 }'`
+ RMAX_TPRO_KPC=`echo "${RMAX_TPRO_PIX} ${KPC_PER_PIXEL}" | awk '{ printf("%.2f", $1*$2) }'`
+fi
+IMG_DIR=`eval ${DFT_IMG_DIR_CMD}`
+EXPCORR_CONF=`ls ${IMG_DIR}/${DFT_EXPCORR_CONF_PAT} 2> /dev/null`
+echo "EXPCORR_CONF: ${EXPCORR_CONF}"
+if [ -f "${EXPCORR_CONF}" ]; then
+ T_REF=`grep '^temp' ${EXPCORR_CONF} | awk '{ print $2 }'`
+ Z_REF=`grep '^abund' ${EXPCORR_CONF} | awk '{ print $2 }'`
+fi
+[ -z "${NFW_RMIN_KPC}" ] && NFW_RMIN_KPC="null"
+[ -z "${RMAX_SBP_PIX}" ] && RMAX_SBP_PIX="null"
+[ -z "${RMAX_SBP_KPC}" ] && RMAX_SBP_KPC="null"
+[ -z "${T_REF}" ] && T_REF="null"
+[ -z "${Z_REF}" ] && Z_REF="null"
+# misc }}}
+
+## determine single/double beta {{{
+if grep -q '^beta2' ${SBP_CFG}; then
+ MODEL_SBP="double-beta"
+ n01=`grep '^n01' ${RES_FINAL} | awk '{ print $2 }'`
+ beta1=`grep '^beta1' ${RES_FINAL} | awk '{ print $2 }'`
+ rc1=`grep -E '^rc1\s' ${RES_FINAL} | awk '{ print $2 }'`
+ rc1_kpc=`grep '^rc1_kpc' ${RES_FINAL} | awk '{ print $2 }'`
+ n02=`grep '^n02' ${RES_FINAL} | awk '{ print $2 }'`
+ beta2=`grep '^beta2' ${RES_FINAL} | awk '{ print $2 }'`
+ rc2=`grep -E '^rc2\s' ${RES_FINAL} | awk '{ print $2 }'`
+ rc2_kpc=`grep '^rc2_kpc' ${RES_FINAL} | awk '{ print $2 }'`
+ BKG=`grep '^bkg' ${RES_FINAL} | awk '{ print $2 }'`
+ # beta1 -> smaller rc; beta2 -> bigger rc
+ if [ `echo "${rc1} < ${rc2}" | bc -l` -eq 1 ]; then
+ N01=${n01}
+ BETA1=${beta1}
+ RC1=${rc1}
+ RC1_KPC=${rc1_kpc}
+ N02=${n02}
+ BETA2=${beta2}
+ RC2=${rc2}
+ RC2_KPC=${rc2_kpc}
+ else
+ # beta1 <-> beta2 (swap)
+ N01=${n02}
+ BETA1=${beta2}
+ RC1=${rc2}
+ RC1_KPC=${rc2_kpc}
+ N02=${n01}
+ BETA2=${beta1}
+ RC2=${rc1}
+ RC2_KPC=${rc1_kpc}
+ fi
+else
+ MODEL_SBP="single-beta"
+ N01="null"
+ BETA1="null"
+ RC1="null"
+ RC1_KPC="null"
+ N02=`grep '^n0' ${RES_FINAL} | awk '{ print $2 }'`
+ BETA2=`grep '^beta' ${RES_FINAL} | awk '{ print $2 }'`
+ RC2=`grep -E '^rc\s' ${RES_FINAL} | awk '{ print $2 }'`
+ RC2_KPC=`grep '^rc_kpc' ${RES_FINAL} | awk '{ print $2 }'`
+ BKG=`grep '^bkg' ${RES_FINAL} | awk '{ print $2 }'`
+fi
+## single/double beta }}}
+
+## get `mass/virial_radius/luminosity' {{{
+# 200 data {{{
+R200_VAL=`grep '^r200' ${RES_FINAL} | awk '{ print $2 }'`
+R200_ERR_L=`grep '^r200' ${RES_FINAL} | awk '{ print $3 }' | awk -F'/' '{ print $1 }'`
+R200_ERR_U=`grep '^r200' ${RES_FINAL} | awk '{ print $3 }' | awk -F'/' '{ print $2 }' | tr -d '+'`
+M200_VAL=`grep '^m200' ${RES_FINAL} | awk '{ print $2 }'`
+M200_ERR_L=`grep '^m200' ${RES_FINAL} | awk '{ print $3 }' | awk -F'/' '{ print $1 }'`
+M200_ERR_U=`grep '^m200' ${RES_FINAL} | awk '{ print $3 }' | awk -F'/' '{ print $2 }' | tr -d '+'`
+L200_VAL=`grep '^L200' ${RES_FINAL} | awk '{ print $2 }'`
+L200_ERR=`grep '^L200' ${RES_FINAL} | awk '{ print $4 }'`
+MGAS200_VAL=`grep '^gas_m200' ${RES_FINAL} | awk '{ print $2 }'`
+MGAS200_ERR_L=`grep '^gas_m200' ${RES_FINAL} | awk '{ print $3 }' | awk -F'/' '{ print $1 }'`
+MGAS200_ERR_U=`grep '^gas_m200' ${RES_FINAL} | awk '{ print $3 }' | awk -F'/' '{ print $2 }' | tr -d '+'`
+FGAS200_VAL=`grep '^gas_fraction200' ${RES_FINAL} | awk '{ print $2 }'`
+FGAS200_ERR_L=`grep '^gas_fraction200' ${RES_FINAL} | awk '{ print $3 }' | awk -F'/' '{ print $1 }'`
+FGAS200_ERR_U=`grep '^gas_fraction200' ${RES_FINAL} | awk '{ print $3 }' | awk -F'/' '{ print $2 }' | tr -d '+'`
+[ -z "${R200_VAL}" ] && R200_VAL="null"
+[ -z "${R200_ERR_L}" ] && R200_ERR_L="null"
+[ -z "${R200_ERR_U}" ] && R200_ERR_U="null"
+[ -z "${M200_VAL}" ] && M200_VAL="null"
+[ -z "${M200_ERR_L}" ] && M200_ERR_L="null"
+[ -z "${M200_ERR_U}" ] && M200_ERR_U="null"
+[ -z "${L200_VAL}" ] && L200_VAL="null"
+[ -z "${L200_ERR}" ] && L200_ERR="null"
+[ -z "${MGAS200_VAL}" ] && MGAS200_VAL="null"
+[ -z "${MGAS200_ERR_L}" ] && MGAS200_ERR_L="null"
+[ -z "${MGAS200_ERR_U}" ] && MGAS200_ERR_U="null"
+[ -z "${FGAS200_VAL}" ] && FGAS200_VAL="null"
+[ -z "${FGAS200_ERR_L}" ] && FGAS200_ERR_L="null"
+[ -z "${FGAS200_ERR_U}" ] && FGAS200_ERR_U="null"
+# 200 }}}
+# 500 data {{{
+R500_VAL=`grep '^r500' ${RES_FINAL} | awk '{ print $2 }'`
+R500_ERR_L=`grep '^r500' ${RES_FINAL} | awk '{ print $3 }' | awk -F'/' '{ print $1 }'`
+R500_ERR_U=`grep '^r500' ${RES_FINAL} | awk '{ print $3 }' | awk -F'/' '{ print $2 }' | tr -d '+'`
+M500_VAL=`grep '^m500' ${RES_FINAL} | awk '{ print $2 }'`
+M500_ERR_L=`grep '^m500' ${RES_FINAL} | awk '{ print $3 }' | awk -F'/' '{ print $1 }'`
+M500_ERR_U=`grep '^m500' ${RES_FINAL} | awk '{ print $3 }' | awk -F'/' '{ print $2 }' | tr -d '+'`
+L500_VAL=`grep '^L500' ${RES_FINAL} | awk '{ print $2 }'`
+L500_ERR=`grep '^L500' ${RES_FINAL} | awk '{ print $4 }'`
+MGAS500_VAL=`grep '^gas_m500' ${RES_FINAL} | awk '{ print $2 }'`
+MGAS500_ERR_L=`grep '^gas_m500' ${RES_FINAL} | awk '{ print $3 }' | awk -F'/' '{ print $1 }'`
+MGAS500_ERR_U=`grep '^gas_m500' ${RES_FINAL} | awk '{ print $3 }' | awk -F'/' '{ print $2 }' | tr -d '+'`
+FGAS500_VAL=`grep '^gas_fraction500' ${RES_FINAL} | awk '{ print $2 }'`
+FGAS500_ERR_L=`grep '^gas_fraction500' ${RES_FINAL} | awk '{ print $3 }' | awk -F'/' '{ print $1 }'`
+FGAS500_ERR_U=`grep '^gas_fraction500' ${RES_FINAL} | awk '{ print $3 }' | awk -F'/' '{ print $2 }' | tr -d '+'`
+[ -z "${R500_VAL}" ] && R500_VAL="null"
+[ -z "${R500_ERR_L}" ] && R500_ERR_L="null"
+[ -z "${R500_ERR_U}" ] && R500_ERR_U="null"
+[ -z "${M500_VAL}" ] && M500_VAL="null"
+[ -z "${M500_ERR_L}" ] && M500_ERR_L="null"
+[ -z "${M500_ERR_U}" ] && M500_ERR_U="null"
+[ -z "${L500_VAL}" ] && L500_VAL="null"
+[ -z "${L500_ERR}" ] && L500_ERR="null"
+[ -z "${MGAS500_VAL}" ] && MGAS500_VAL="null"
+[ -z "${MGAS500_ERR_L}" ] && MGAS500_ERR_L="null"
+[ -z "${MGAS500_ERR_U}" ] && MGAS500_ERR_U="null"
+[ -z "${FGAS500_VAL}" ] && FGAS500_VAL="null"
+[ -z "${FGAS500_ERR_L}" ] && FGAS500_ERR_L="null"
+[ -z "${FGAS500_ERR_U}" ] && FGAS500_ERR_U="null"
+# 500 }}}
+# 1500 data {{{
+R1500_VAL=`grep '^r1500' ${RES_FINAL} | awk '{ print $2 }'`
+R1500_ERR_L=`grep '^r1500' ${RES_FINAL} | awk '{ print $3 }' | awk -F'/' '{ print $1 }'`
+R1500_ERR_U=`grep '^r1500' ${RES_FINAL} | awk '{ print $3 }' | awk -F'/' '{ print $2 }' | tr -d '+'`
+M1500_VAL=`grep '^m1500' ${RES_FINAL} | awk '{ print $2 }'`
+M1500_ERR_L=`grep '^m1500' ${RES_FINAL} | awk '{ print $3 }' | awk -F'/' '{ print $1 }'`
+M1500_ERR_U=`grep '^m1500' ${RES_FINAL} | awk '{ print $3 }' | awk -F'/' '{ print $2 }' | tr -d '+'`
+L1500_VAL=`grep '^L1500' ${RES_FINAL} | awk '{ print $2 }'`
+L1500_ERR=`grep '^L1500' ${RES_FINAL} | awk '{ print $4 }'`
+MGAS1500_VAL=`grep '^gas_m1500' ${RES_FINAL} | awk '{ print $2 }'`
+MGAS1500_ERR_L=`grep '^gas_m1500' ${RES_FINAL} | awk '{ print $3 }' | awk -F'/' '{ print $1 }'`
+MGAS1500_ERR_U=`grep '^gas_m1500' ${RES_FINAL} | awk '{ print $3 }' | awk -F'/' '{ print $2 }' | tr -d '+'`
+FGAS1500_VAL=`grep '^gas_fraction1500' ${RES_FINAL} | awk '{ print $2 }'`
+FGAS1500_ERR_L=`grep '^gas_fraction1500' ${RES_FINAL} | awk '{ print $3 }' | awk -F'/' '{ print $1 }'`
+FGAS1500_ERR_U=`grep '^gas_fraction1500' ${RES_FINAL} | awk '{ print $3 }' | awk -F'/' '{ print $2 }' | tr -d '+'`
+[ -z "${R1500_VAL}" ] && R1500_VAL="null"
+[ -z "${R1500_ERR_L}" ] && R1500_ERR_L="null"
+[ -z "${R1500_ERR_U}" ] && R1500_ERR_U="null"
+[ -z "${M1500_VAL}" ] && M1500_VAL="null"
+[ -z "${M1500_ERR_L}" ] && M1500_ERR_L="null"
+[ -z "${M1500_ERR_U}" ] && M1500_ERR_U="null"
+[ -z "${L1500_VAL}" ] && L1500_VAL="null"
+[ -z "${L1500_ERR}" ] && L1500_ERR="null"
+[ -z "${MGAS1500_VAL}" ] && MGAS1500_VAL="null"
+[ -z "${MGAS1500_ERR_L}" ] && MGAS1500_ERR_L="null"
+[ -z "${MGAS1500_ERR_U}" ] && MGAS1500_ERR_U="null"
+[ -z "${FGAS1500_VAL}" ] && FGAS1500_VAL="null"
+[ -z "${FGAS1500_ERR_L}" ] && FGAS1500_ERR_L="null"
+[ -z "${FGAS1500_ERR_U}" ] && FGAS1500_ERR_U="null"
+# 1500 }}}
+# 2500 data {{{
+R2500_VAL=`grep '^r2500' ${RES_FINAL} | awk '{ print $2 }'`
+R2500_ERR_L=`grep '^r2500' ${RES_FINAL} | awk '{ print $3 }' | awk -F'/' '{ print $1 }'`
+R2500_ERR_U=`grep '^r2500' ${RES_FINAL} | awk '{ print $3 }' | awk -F'/' '{ print $2 }' | tr -d '+'`
+M2500_VAL=`grep '^m2500' ${RES_FINAL} | awk '{ print $2 }'`
+M2500_ERR_L=`grep '^m2500' ${RES_FINAL} | awk '{ print $3 }' | awk -F'/' '{ print $1 }'`
+M2500_ERR_U=`grep '^m2500' ${RES_FINAL} | awk '{ print $3 }' | awk -F'/' '{ print $2 }' | tr -d '+'`
+L2500_VAL=`grep '^L2500' ${RES_FINAL} | awk '{ print $2 }'`
+L2500_ERR=`grep '^L2500' ${RES_FINAL} | awk '{ print $4 }'`
+MGAS2500_VAL=`grep '^gas_m2500' ${RES_FINAL} | awk '{ print $2 }'`
+MGAS2500_ERR_L=`grep '^gas_m2500' ${RES_FINAL} | awk '{ print $3 }' | awk -F'/' '{ print $1 }'`
+MGAS2500_ERR_U=`grep '^gas_m2500' ${RES_FINAL} | awk '{ print $3 }' | awk -F'/' '{ print $2 }' | tr -d '+'`
+FGAS2500_VAL=`grep '^gas_fraction2500' ${RES_FINAL} | awk '{ print $2 }'`
+FGAS2500_ERR_L=`grep '^gas_fraction2500' ${RES_FINAL} | awk '{ print $3 }' | awk -F'/' '{ print $1 }'`
+FGAS2500_ERR_U=`grep '^gas_fraction2500' ${RES_FINAL} | awk '{ print $3 }' | awk -F'/' '{ print $2 }' | tr -d '+'`
+[ -z "${R2500_VAL}" ] && R2500_VAL="null"
+[ -z "${R2500_ERR_L}" ] && R2500_ERR_L="null"
+[ -z "${R2500_ERR_U}" ] && R2500_ERR_U="null"
+[ -z "${M2500_VAL}" ] && M2500_VAL="null"
+[ -z "${M2500_ERR_L}" ] && M2500_ERR_L="null"
+[ -z "${M2500_ERR_U}" ] && M2500_ERR_U="null"
+[ -z "${L2500_VAL}" ] && L2500_VAL="null"
+[ -z "${L2500_ERR}" ] && L2500_ERR="null"
+[ -z "${MGAS2500_VAL}" ] && MGAS2500_VAL="null"
+[ -z "${MGAS2500_ERR_L}" ] && MGAS2500_ERR_L="null"
+[ -z "${MGAS2500_ERR_U}" ] && MGAS2500_ERR_U="null"
+[ -z "${FGAS2500_VAL}" ] && FGAS2500_VAL="null"
+[ -z "${FGAS2500_ERR_L}" ] && FGAS2500_ERR_L="null"
+[ -z "${FGAS2500_ERR_U}" ] && FGAS2500_ERR_U="null"
+# 2500 }}}
+FGRR=`grep '^gas_fraction.*r2500.*r500=' ${RES_FINAL} | sed 's/^.*r500=//' | awk '{ print $1 }'`
+FGRR_ERR_L=`grep '^gas_fraction.*r2500.*r500=' ${RES_FINAL} | sed 's/^.*r500=//' | awk '{ print $2 }' | awk -F'/' '{ print $1 }'`
+FGRR_ERR_U=`grep '^gas_fraction.*r2500.*r500=' ${RES_FINAL} | sed 's/^.*r500=//' | awk '{ print $2 }' | awk -F'/' '{ print $2 }' | tr -d '+'`
+[ -z "${FGRR}" ] && FGRR="null"
+[ -z "${FGRR_ERR_L}" ] && FGRR_ERR_L="null"
+[ -z "${FGRR_ERR_U}" ] && FGRR_ERR_U="null"
+## mrl }}}
+
+## rcool & cooling time {{{
+RCOOL=`grep '^cooling' ${RES_FINAL} | awk '{ print $4 }'`
+COOLING_TIME=`grep '^cooling' ${RES_FINAL} | awk -F'=' '{ print $2 }' | tr -d ' Gyr'`
+[ -z "${RCOOL}" ] && RCOOL="null"
+[ -z "${COOLING_TIME}" ] && COOLING_TIME="null"
+## cooling time }}}
+## repro/mass }}}
+
+cd ${BASEDIR}
+## orig json file {{{
+printf "## collect data from original info file ...\n"
+OBJ_NAME=`grep '"Source\ Name' ${JSON_FILE} | sed 's/.*"Source.*":\ //' | sed 's/^"//' | sed 's/"\ *,$//'`
+OBJ_UNAME=`grep '"Unified\ Name' ${JSON_FILE} | sed 's/.*"Unified.*":\ //' | sed 's/^"//' | sed 's/"\ *,$//'`
+OBJ_RA=`grep '"R\.\ A\.' ${JSON_FILE} | sed 's/.*"R\.\ A\.":\ //' | sed 's/^"//' | sed 's/"\ *,$//'`
+OBJ_DEC=`grep '"Dec\.' ${JSON_FILE} | sed 's/.*"Dec\.":\ //' | sed 's/^"//' | sed 's/"\ *,$//'`
+OBJ_XCRA=`grep '"XCNTRD_RA' ${JSON_FILE} | sed 's/.*"XCNTRD_RA":\ //' | sed 's/^"//' | sed 's/"\ *,$//'`
+OBJ_XCDEC=`grep '"XCNTRD_DEC' ${JSON_FILE} | sed 's/.*"XCNTRD_DEC":\ //' | sed 's/^"//' | sed 's/"\ *,$//'`
+REDSHIFT=`grep '"redshift' ${JSON_FILE} | sed 's/.*"redshift.*":\ //' | sed 's/\ *,$//'`
+COOLCORE=`grep -i '"cool.*core' ${JSON_FILE} | sed 's/.*"[cC]ool.*":\ //' | sed 's/\ *,$//'`
+[ -z "${COOLCORE}" ] && COOLCORE="null"
+OBJ_FEATURE=`grep '"Feature' ${JSON_FILE} | sed 's/.*"Feature":\ //' | sed 's/^"//' | sed 's/"\ *,*$//'`
+OBJ_NOTE=`grep '"NOTE' ${JSON_FILE} | sed 's/.*"NOTE":\ //' | sed 's/^"//' | sed 's/"\ *,*$//'`
+
+# T & Z {{{
+T_1R500=`grep '"T(0\.1.*R500' ${JSON_FILE} | sed 's/.*"T.*":\ //' | sed 's/\ *,$//'`
+T_1ERR=`grep '"T_err(.*0\.1.*R500' ${JSON_FILE} | sed 's/.*"T.*":\ //' | sed 's/\ *,$//'`
+T_1ERR_L=`grep '"T_err_l.*0\.1.*R500' ${JSON_FILE} | sed 's/.*"T.*":\ //' | sed 's/\ *,$//'`
+T_1ERR_U=`grep '"T_err_u.*0\.1.*R500' ${JSON_FILE} | sed 's/.*"T.*":\ //' | sed 's/\ *,$//'`
+Z_1R500=`grep '"Z(0\.1.*R500' ${JSON_FILE} | sed 's/.*"Z.*":\ //' | sed 's/\ *,$//'`
+Z_1ERR=`grep '"Z_err(.*0\.1.*R500' ${JSON_FILE} | sed 's/.*"Z.*":\ //' | sed 's/\ *,$//'`
+Z_1ERR_L=`grep '"Z_err_l.*0\.1.*R500' ${JSON_FILE} | sed 's/.*"Z.*":\ //' | sed 's/\ *,$//'`
+Z_1ERR_U=`grep '"Z_err_u.*0\.1.*R500' ${JSON_FILE} | sed 's/.*"Z.*":\ //' | sed 's/\ *,$//'`
+T_2R500=`grep '"T(0\.2.*R500' ${JSON_FILE} | sed 's/.*"T.*":\ //' | sed 's/\ *,$//'`
+T_2ERR=`grep '"T_err(.*0\.2.*R500' ${JSON_FILE} | sed 's/.*"T.*":\ //' | sed 's/\ *,$//'`
+T_2ERR_L=`grep '"T_err_l.*0\.2.*R500' ${JSON_FILE} | sed 's/.*"T.*":\ //' | sed 's/\ *,$//'`
+T_2ERR_U=`grep '"T_err_u.*0\.2.*R500' ${JSON_FILE} | sed 's/.*"T.*":\ //' | sed 's/\ *,$//'`
+Z_2R500=`grep '"Z(0\.2.*R500' ${JSON_FILE} | sed 's/.*"Z.*":\ //' | sed 's/\ *,$//'`
+Z_2ERR=`grep '"Z_err(.*0\.2.*R500' ${JSON_FILE} | sed 's/.*"Z.*":\ //' | sed 's/\ *,$//'`
+Z_2ERR_L=`grep '"Z_err_l.*0\.2.*R500' ${JSON_FILE} | sed 's/.*"Z.*":\ //' | sed 's/\ *,$//'`
+Z_2ERR_U=`grep '"Z_err_u.*0\.2.*R500' ${JSON_FILE} | sed 's/.*"Z.*":\ //' | sed 's/\ *,$//'`
+[ -z "${T_1R500}" ] && T_1R500="null"
+[ -z "${T_1ERR}" ] && T_1ERR="null"
+[ -z "${T_1ERR_L}" ] && T_1ERR_L="null"
+[ -z "${T_1ERR_U}" ] && T_1ERR_U="null"
+[ -z "${Z_1R500}" ] && Z_1R500="null"
+[ -z "${Z_1ERR}" ] && Z_1ERR="null"
+[ -z "${Z_1ERR_L}" ] && Z_1ERR_L="null"
+[ -z "${Z_1ERR_U}" ] && Z_1ERR_U="null"
+[ -z "${T_2R500}" ] && T_2R500="null"
+[ -z "${T_2ERR}" ] && T_2ERR="null"
+[ -z "${T_2ERR_L}" ] && T_2ERR_L="null"
+[ -z "${T_2ERR_U}" ] && T_2ERR_U="null"
+[ -z "${Z_2R500}" ] && Z_2R500="null"
+[ -z "${Z_2ERR}" ] && Z_2ERR="null"
+[ -z "${Z_2ERR_L}" ] && Z_2ERR_L="null"
+[ -z "${Z_2ERR_U}" ] && Z_2ERR_U="null"
+# T & Z }}}
+## json data }}}
+
+## output data to JSON file {{{
+printf "## save collected data into file \`${JSON_FILE}' ...\n"
+mv -fv ${JSON_FILE} ${JSON_FILE}_bak
+cat > ${JSON_FILE} << _EOF_
+{
+ "Obs. ID": ${OBS_ID},
+ "Source Name": "${OBJ_NAME}",
+ "Unified Name": "${OBJ_UNAME}",
+ "Obs. Date": "${DATE_OBS}",
+ "Detector": "${ACIS_TYPE}",
+ "Exposure (ks)": ${EXPOSURE_RAW},
+ "Clean Exposure (ks)": ${EXPOSURE_CLEAN},
+ "R. A.": "${OBJ_RA}",
+ "Dec.": "${OBJ_DEC}",
+ "XCNTRD_RA": "${OBJ_XCRA}",
+ "XCNTRD_DEC": "${OBJ_XCDEC}",
+ "nH (10^22 cm^-2)": ${N_H},
+ "redshift": ${REDSHIFT},
+ "E(z)": ${E_Z},
+ "T_ref (keV)": ${T_REF},
+ "Z_ref (solar)": ${Z_REF},
+ "Rmax_SBP (pixel)": ${RMAX_SBP_PIX},
+ "Rmax_Tpro (pixel)": ${RMAX_TPRO_PIX},
+ "Rmax_SBP (kpc)": ${RMAX_SBP_KPC},
+ "Rmax_Tpro (kpc)": ${RMAX_TPRO_KPC},
+ "NFW_Rmin (kpc)": ${NFW_RMIN_KPC},
+ "Model_SBP": "${MODEL_SBP}",
+ "n01": ${N01},
+ "beta1": ${BETA1},
+ "rc1": ${RC1},
+ "rc1_kpc": ${RC1_KPC},
+ "n02": ${N02},
+ "beta2": ${BETA2},
+ "rc2": ${RC2},
+ "rc2_kpc": ${RC2_KPC},
+ "bkg": ${BKG},
+ "R200 (kpc)": ${R200_VAL},
+ "R200_err_lower (1sigma)": ${R200_ERR_L},
+ "R200_err_upper (1sigma)": ${R200_ERR_U},
+ "M200 (M_sun)": ${M200_VAL},
+ "M200_err_lower (1sigma)": ${M200_ERR_L},
+ "M200_err_upper (1sigma)": ${M200_ERR_U},
+ "L200 (erg/s)": ${L200_VAL},
+ "L200_err (1sigma)": ${L200_ERR},
+ "M_gas200 (M_sun)": ${MGAS200_VAL},
+ "M_gas200_err_lower (1sigma)": ${MGAS200_ERR_L},
+ "M_gas200_err_upper (1sigma)": ${MGAS200_ERR_U},
+ "F_gas200": ${FGAS200_VAL},
+ "F_gas200_err_lower (1sigma)": ${FGAS200_ERR_L},
+ "F_gas200_err_upper (1sigma)": ${FGAS200_ERR_U},
+ "R500 (kpc)": ${R500_VAL},
+ "R500_err_lower (1sigma)": ${R500_ERR_L},
+ "R500_err_upper (1sigma)": ${R500_ERR_U},
+ "M500 (M_sun)": ${M500_VAL},
+ "M500_err_lower (1sigma)": ${M500_ERR_L},
+ "M500_err_upper (1sigma)": ${M500_ERR_U},
+ "L500 (erg/s)": ${L500_VAL},
+ "L500_err (1sigma)": ${L500_ERR},
+ "M_gas500 (M_sun)": ${MGAS500_VAL},
+ "M_gas500_err_lower (1sigma)": ${MGAS500_ERR_L},
+ "M_gas500_err_upper (1sigma)": ${MGAS500_ERR_U},
+ "F_gas500": ${FGAS500_VAL},
+ "F_gas500_err_lower (1sigma)": ${FGAS500_ERR_L},
+ "F_gas500_err_upper (1sigma)": ${FGAS500_ERR_U},
+ "R1500": ${R1500_VAL},
+ "R1500_err_lower": ${R1500_ERR_L},
+ "R1500_err_upper": ${R1500_ERR_U},
+ "M1500": ${M1500_VAL},
+ "M1500_err_lower": ${M1500_ERR_L},
+ "M1500_err_upper": ${M1500_ERR_U},
+ "L1500": ${L1500_VAL},
+ "L1500_err": ${L1500_ERR},
+ "M_gas1500": ${MGAS1500_VAL},
+ "M_gas1500_err_lower": ${MGAS1500_ERR_L},
+ "M_gas1500_err_upper": ${MGAS1500_ERR_U},
+ "F_gas1500": ${FGAS1500_VAL},
+ "F_gas1500_err_lower": ${FGAS1500_ERR_L},
+ "F_gas1500_err_upper": ${FGAS1500_ERR_U},
+ "R2500": ${R2500_VAL},
+ "R2500_err_lower": ${R2500_ERR_L},
+ "R2500_err_upper": ${R2500_ERR_U},
+ "M2500": ${M2500_VAL},
+ "M2500_err_lower": ${M2500_ERR_L},
+ "M2500_err_upper": ${M2500_ERR_U},
+ "L2500": ${L2500_VAL},
+ "L2500_err": ${L2500_ERR},
+ "M_gas2500": ${MGAS2500_VAL},
+ "M_gas2500_err_lower": ${MGAS2500_ERR_L},
+ "M_gas2500_err_upper": ${MGAS2500_ERR_U},
+ "F_gas2500": ${FGAS2500_VAL},
+ "F_gas2500_err_lower": ${FGAS2500_ERR_L},
+ "F_gas2500_err_upper": ${FGAS2500_ERR_U},
+ "T(0.1-0.5 R500)": ${T_1R500},
+ "T_err(0.1-0.5 R500)": ${T_1ERR},
+ "T_err_l(0.1-0.5 R500)": ${T_1ERR_L},
+ "T_err_u(0.1-0.5 R500)": ${T_1ERR_U},
+ "Z(0.1-0.5 R500)": ${Z_1R500},
+ "Z_err(0.1-0.5 R500)": ${Z_1ERR},
+ "Z_err_l(0.1-0.5 R500)": ${Z_1ERR_L},
+ "Z_err_u(0.1-0.5 R500)": ${Z_1ERR_U},
+ "T(0.2-0.5 R500)": ${T_2R500},
+ "T_err(0.2-0.5 R500)": ${T_2ERR},
+ "T_err_l(0.2-0.5 R500)": ${T_2ERR_L},
+ "T_err_u(0.2-0.5 R500)": ${T_2ERR_U},
+ "Z(0.2-0.5 R500)": ${Z_2R500},
+ "Z_err(0.2-0.5 R500)": ${Z_2ERR},
+ "Z_err_l(0.2-0.5 R500)": ${Z_2ERR_L},
+ "Z_err_u(0.2-0.5 R500)": ${Z_2ERR_U},
+ "F_gas(R2500-R500)": ${FGRR},
+ "F_gas_err_l(R2500-R500)": ${FGRR_ERR_L},
+ "F_gas_err_u(R2500-R500)": ${FGRR_ERR_U},
+ "R_cool (kpc)": ${RCOOL},
+ "Cooling_time (Gyr)": ${COOLING_TIME},
+ "Cool_core": ${COOLCORE},
+ "Feature": "${OBJ_FEATURE}",
+ "NOTE": "${OBJ_NOTE}"
+},
+_EOF_
+## output JSON }}}
+
+exit 0
+
diff --git a/scripts/chandra_gensbpreg.sh b/scripts/chandra_gensbpreg.sh
new file mode 100755
index 0000000..df56501
--- /dev/null
+++ b/scripts/chandra_gensbpreg.sh
@@ -0,0 +1,135 @@
+#!/bin/sh
+#
+# v2.0, 2013/03/06, LIweitiaNux
+# add param `-t' to test STN
+#
+###########################################################
+
+# minimal counts
+CNT_MIN=50
+# energy: 700-7000eV -- channel 49:479
+CH_LOW=49
+CH_HI=479
+# energy 9.5-12keV -- channel 651:822
+CH_BKG_LOW=651
+CH_BKG_HI=822
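+# signal-to-noise estimate used below (see the awk expression):
+#   STN = (COUNT_SRC / INDEX_SRC) / (COUNT_BKG / INDEX_BKG)
+# i.e. the 0.7-7 keV counts of a region, normalized by its own 9.5-12 keV
+# particle-band counts, relative to the same ratio of the background spectrum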
+
+if [ $# -lt 6 ]; then
+ printf "usage:\n"
+ printf " `basename $0` <evt> <evt_e> <x> <y> <bkg_pi> <reg_out>\n"
+ printf " `basename $0` -t <evt> <evt_e> <x> <y> <bkg_pi> <rin1> ...\n"
+ exit 1
+fi
+
+if [ "x$1" = "x-t" ] && [ $# -ge 7 ]; then
+ EVT=$2
+ EVT_E=$3
+ X=$4
+ Y=$5
+ BKGSPC=$6
+ # process <rin> ...
+ printf "REG -- STN\n"
+ while [ ! -z "$7" ]; do
+ RIN=$7
+ shift
+ ROUT=`echo "scale=2; $RIN * 1.2" | bc -l`
+ TMP_REG="pie($X,$Y,$RIN,$ROUT,0,360)"
+ TMP_SPC="_tmpspc.pi"
+ punlearn dmextract
+ dmextract infile="${EVT}[sky=${TMP_REG}][bin pi]" outfile=${TMP_SPC} wmap="[energy=300:12000][bin det=8]" clobber=yes
+ INDEX_SRC=`dmstat "${TMP_SPC}[channel=${CH_BKG_LOW}:${CH_BKG_HI}][cols counts]" | grep "sum:" | awk '{print $2}'`
+ INDEX_BKG=`dmstat "${BKGSPC}[channel=${CH_BKG_LOW}:${CH_BKG_HI}][cols counts]" | grep "sum:" | awk '{print $2}'`
+ COUNT_SRC=`dmstat "${TMP_SPC}[channel=${CH_LOW}:${CH_HI}][cols counts]" | grep "sum:" | awk '{print $2}'`
+ COUNT_BKG=`dmstat "${BKGSPC}[channel=${CH_LOW}:${CH_HI}][cols counts]" | grep "sum:" | awk '{print $2}'`
+ # echo "CNT_SRC: ${COUNT_SRC}, IDX_SRC: ${INDEX_SRC}, CNT_BKG: ${COUNT_BKG}, IDX_BKG: ${INDEX_BKG}"
+ # exit
+ STN=`echo ${COUNT_SRC} ${INDEX_SRC} ${COUNT_BKG} ${INDEX_BKG} | awk '{ printf("%f",$1/$2/$3*$4) }'`
+ printf "${TMP_REG} -- ${STN}\n"
+ [ -e "${TMP_SPC}" ] && rm -f ${TMP_SPC}
+ done
+ # exit
+ exit 0
+fi
+
+EVT=$1
+EVT_E=$2
+X=$3
+Y=$4
+BKGSPC=$5
+REG_OUT=$6
+[ -f "${REG_OUT}" ] && mv -fv ${REG_OUT} ${REG_OUT}_bak
+echo "EVT: ${EVT}"
+echo "EVT_E: ${EVT_E}"
+echo "X: ${X}"
+echo "Y: ${Y}"
+echo "BKGSPC: ${BKGSPC}"
+echo ""
+
+###
+printf "## remove dmlist.par dmextract.par\n"
+DMLIST_PAR="$HOME/cxcds_param4/dmlist.par"
+DMEXTRACT_PAR="$HOME/cxcds_param4/dmextract.par"
+[ -f "${DMLIST_PAR}" ] && rm -f ${DMLIST_PAR}
+[ -f "${DMEXTRACT_PAR}" ] && rm -f ${DMEXTRACT_PAR}
+
+RIN=0
+ROUT=0
+CNTS=0
+for i in `seq 1 10`; do
+ printf "gen reg #$i @ cnts:${CNT_MIN} ...\n"
+ ROUT=`expr $RIN + 5`
+ TMP_REG="pie($X,$Y,$RIN,$ROUT,0,360)"
+ CNTS=`dmlist "${EVT_E}[sky=${TMP_REG}]" blocks | grep 'EVENTS' | awk '{ print $8 }'`
+ while [ `echo "$CNTS < $CNT_MIN" | bc -l` -eq 1 ]; do
+ ROUT=`expr $ROUT + 1`
+ TMP_REG="pie($X,$Y,$RIN,$ROUT,0,360)"
+ CNTS=`dmlist "${EVT_E}[sky=${TMP_REG}]" blocks | grep 'EVENTS' | awk '{ print $8 }'`
+ done
+ TMP_REG="pie($X,$Y,$RIN,$ROUT,0,360)"
+ echo "${TMP_REG}" >> ${REG_OUT}
+ RIN=$ROUT
+done
+
+# last reg width
+#REG_W=`tail -n 1 ${REG_OUT} | tr -d '()' | awk -F',' '{ print $4-$3 }'`
+
+RIN=$ROUT
+ROUT=`echo "scale=2; $ROUT * 1.2" | bc -l`
+
+STN=10
+i=11
+STN_FILE="sbp_stn.dat"
+[ -e ${STN_FILE} ] && mv -fv ${STN_FILE} ${STN_FILE}_bak
+while [ `echo "${STN} > 1.5" | bc -l` -eq 1 ]; do
+ printf "gen reg #$i \n"
+ i=`expr $i + 1`
+ TMP_REG="pie($X,$Y,$RIN,$ROUT,0,360)"
+
+ # next reg
+ RIN=$ROUT
+ ROUT=`echo "scale=2; $ROUT * 1.2" | bc -l`
+ TMP_SPC=_tmpspc.pi
+ punlearn dmextract
+ dmextract infile="${EVT}[sky=${TMP_REG}][bin pi]" outfile=${TMP_SPC} wmap="[energy=300:12000][bin det=8]" clobber=yes
+ INDEX_SRC=`dmstat "${TMP_SPC}[channel=${CH_BKG_LOW}:${CH_BKG_HI}][cols counts]" | grep "sum:" | awk '{print $2}'`
+ INDEX_BKG=`dmstat "${BKGSPC}[channel=${CH_BKG_LOW}:${CH_BKG_HI}][cols counts]" | grep "sum:" | awk '{print $2}'`
+
+ COUNT_SRC=`dmstat "${TMP_SPC}[channel=${CH_LOW}:${CH_HI}][cols counts]" | grep "sum:" | awk '{print $2}'`
+ COUNT_BKG=`dmstat "${BKGSPC}[channel=${CH_LOW}:${CH_HI}][cols counts]" | grep "sum:" | awk '{print $2}'`
+
+ # echo "CNT_SRC: ${COUNT_SRC}, IDX_SRC: ${INDEX_SRC}, CNT_BKG: ${COUNT_BKG}, IDX_BKG: ${INDEX_BKG}"
+ # exit
+
+ STN=`echo ${COUNT_SRC} ${INDEX_SRC} ${COUNT_BKG} ${INDEX_BKG} | awk '{ printf("%f",$1/$2/$3*$4) }'`
+ CNT=`dmlist "${EVT_E}[sky=${TMP_REG}]" blocks | grep 'EVENTS' | awk '{ print $8 }'`
+ echo "CNT: ${CNT}"
+ echo "CNT_MIN: ${CNT_MIN}"
+ if [ `echo "${CNT} < ${CNT_MIN}" | bc -l` -eq 1 ]; then
+ break
+ fi
+ echo "STN: ${STN}"
+ echo "RIN: ${RIN}, ROUT: ${ROUT}"
+ echo "STN: ${STN}" >> ${STN_FILE}
+ echo "${TMP_REG}" >> ${REG_OUT}
+done
+
diff --git a/scripts/chandra_genspcreg.sh b/scripts/chandra_genspcreg.sh
new file mode 100755
index 0000000..5099136
--- /dev/null
+++ b/scripts/chandra_genspcreg.sh
@@ -0,0 +1,130 @@
+#!/bin/sh
+
+if [ $# -ne 6 ] ; then
+ printf "usage:\n"
+ printf " `basename $0` <evt> <evt_e> <bkg_pi> <x> <y> <reg_out>\n"
+ exit 1
+fi
+
+EVT=$1
+EVT_E=$2
+BKGSPC=$3
+X=$4
+Y=$5
+REG_OUT=$6
+[ -f "${REG_OUT}" ] && mv -fv ${REG_OUT} ${REG_OUT}_bak
+
+echo "EVT: ${EVT}"
+echo "EVT_E: ${EVT_E}"
+echo "BKGSPC: ${BKGSPC}"
+echo "X: ${X}"
+echo "Y: ${Y}"
+echo ""
+
+###
+printf "## remove dmlist.par dmextract.par\n"
+DMLIST_PAR="$HOME/cxcds_param4/dmlist.par"
+DMEXTRACT_PAR="$HOME/cxcds_param4/dmextract.par"
+[ -f "${DMLIST_PAR}" ] && rm -fv ${DMLIST_PAR}
+[ -f "${DMEXTRACT_PAR}" ] && rm -fv ${DMEXTRACT_PAR}
+
+# minimal counts
+CNT_MIN=2500
+# signal-to-noise ratio
+STN=10
+
+# energy 700-7000 eV -- channel 49:479
+CH_LOW=49
+CH_HI=479
+# energy 9.5-12 keV -- channel 651:822
+CH_BKG_LOW=651
+CH_BKG_HI=822
+
+RIN=0
+ROUT=0
+CNTS=0
+ROUT_MAX=1500
+
+STN_FILE="spc_stn.dat"
+[ -e ${STN_FILE} ] && mv -fv ${STN_FILE} ${STN_FILE}_bak
+i=0
+while [ `echo "$STN > 2 "| bc -l` -eq 1 ] ; do
+ ## LIweitiaNux
+ if [ `echo "$ROUT > $ROUT_MAX" | bc -l` -eq 1 ]; then
+ break
+ fi
+ RIN=${ROUT}
+ if [ $i -gt 0 ]; then
+ printf " #$i: ${TMP_REG}\n"
+ echo "${TMP_REG}" >> ${REG_OUT}
+ fi
+ i=`expr $i + 1`
+ printf "gen reg#$i ...\n"
+ if [ ${ROUT} -eq 0 ] ; then
+ ROUT=5
+ fi
+ TMP_REG="pie($X,$Y,$RIN,$ROUT,0,360)"
+ CNTS=`dmlist "${EVT_E}[sky=${TMP_REG}]" blocks | grep 'EVENTS' | awk '{print $8}'`
+ while [ ${CNTS} -lt ${CNT_MIN} ]; do
+ ROUT=`expr $ROUT + 1 `
+ if [ `echo "$ROUT > $ROUT_MAX" | bc -l` -eq 1 ]; then
+ break
+ fi
+ TMP_REG="pie($X,$Y,$RIN,$ROUT,0,360)"
+ CNTS=`dmlist "${EVT_E}[sky=${TMP_REG}]" blocks | grep 'EVENTS' | awk '{print $8}'`
+ done
+ TMP_SPC=_tmpspc.pi
+ punlearn dmextract
+ dmextract infile="${EVT}[sky=${TMP_REG}][bin pi]" outfile=${TMP_SPC} wmap="[energy=300:12000][bin tdet=8]" clobber=yes
+ INDEX_SRC=`dmstat "${TMP_SPC}[channel=${CH_BKG_LOW}:${CH_BKG_HI}][cols counts]" | grep "sum:" | awk '{print $2}' `
+ INDEX_BKG=`dmstat "${BKGSPC}[channel=${CH_BKG_LOW}:${CH_BKG_HI}][cols counts]" | grep "sum:" | awk '{print $2}' `
+
+ COUNT_SRC=`dmstat "${TMP_SPC}[channel=${CH_LOW}:${CH_HI}][cols counts]" | grep "sum:" | awk '{print $2}' `
+ COUNT_BKG=`dmstat "${BKGSPC}[channel=${CH_LOW}:${CH_HI}][cols counts]" | grep "sum:" | awk '{print $2}' `
+ if [ ${INDEX_SRC} -eq 0 ] ;then
+ STN=10000
+ else
+ STN=`echo ${COUNT_SRC} ${INDEX_SRC} ${COUNT_BKG} ${INDEX_BKG} | awk '{ printf("%f",$1/$2/$3*$4) }' `
+ fi
+ echo " STN: ${STN}"
+ echo "${STN}" >> "${STN_FILE}"
+done
+## LIweitiaNux
+## fix 'i' to be consistent with the actual number of annuli
+i=`expr $i - 1`
+
+if [ $i -lt 3 ]; then
+ printf "*** WARNING: NOT ENOUGH PHOTONS ***\n"
+ printf "*** TOTAL $i regions ***\n\n"
+ if [ ! -f ${REG_OUT} ] || [ `wc -l ${REG_OUT} | awk '{ print $1 }'` -eq 0 ]; then
+ printf "*** ONLY 1 REGION: ${TMP_REG}\n"
+ rm -f ${REG_OUT}
+ echo "${TMP_REG}" >> ${REG_OUT}
+ fi
+elif [ $i -gt 6 ]; then
+ mv -fv ${REG_OUT} ${REG_OUT}_2500bak
+ CNTS=0
+ CNTS_TOTAL=`dmlist "${EVT_E}[sky=pie($X,$Y,0,$RIN,0,360)]" blocks | grep 'EVENTS' | awk '{print $8}'`
+ CNTS_USE=`echo "${CNTS_TOTAL} 6" | awk '{printf("%d", $1/$2)}'`
+ echo "CNT_USE: ${CNT_USE}"
+ printf "*** too many annulus ***\n"
+ printf "*** using ${CNTS_USE} per region ***\n"
+ RIN=0
+ ROUT=5
+ j=1
+ while [ $j -le 6 ] ; do
+ while [ ${CNTS} -lt ${CNTS_USE} ] ; do
+ ROUT=`expr ${ROUT} + 1 `
+ if [ ${ROUT} -gt ${ROUT_MAX} ]; then
+ break
+ fi
+ TMP_REG="pie($X,$Y,$RIN,$ROUT,0,360)"
+ CNTS=`dmlist "${EVT_E}[sky=${TMP_REG}]" blocks | grep 'EVENTS' | awk '{print $8}'`
+ done
+ j=`expr $j + 1 `
+ echo "${TMP_REG}" >> ${REG_OUT}
+ RIN=$ROUT
+ CNTS=0
+ done
+fi
+
diff --git a/scripts/chandra_json2csv_v3.py b/scripts/chandra_json2csv_v3.py
new file mode 100755
index 0000000..cf69cf9
--- /dev/null
+++ b/scripts/chandra_json2csv_v3.py
@@ -0,0 +1,270 @@
+#!/usr/bin/python
+# avoid using `/usr/bin/env python'
+#
+# convert JSON to CSV format
+# Ref: http://stackoverflow.com/questions/1871524/convert-from-json-to-csv-using-python
+## Ref: http://json.parser.online.fr/
+#
+# LIweitiaNux <liweitianux@gmail.com>
+# August 31, 2012
+#
+# ChangeLogs:
+# v3.1, 2013/05/18, LIweitiaNux
+# add key `Feature', corresponding to `collectdata_v3.1'
+# v3.2, 2013/05/29, LIweitiaNux
+# add key `XCNTRD_RA, XCNTRD_DEC'
+# v3.3, 2013/10/14, LIweitiaNux
+# add key `Unified Name'
+#
+
+import sys
+import csv
+import json
+
+argc = len(sys.argv)
+if (argc != 3):
+ print("usage:")
+ print(" "+ sys.argv[0]+ " <input_json> <output_csv>")
+ sys.exit(255)
+
+infile = open(sys.argv[1], 'r')
+data = json.load(infile)
+infile.close()
+
+outfile = csv.writer(open(sys.argv[2], "wb+"))
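+# NOTE: the "wb+" mode matches the Python-2 csv module used here; under
+# Python 3 the file would instead be opened in text mode with newline=''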
+
+# write header {{{
+outfile.writerow([
+ "Obs. ID",
+ "Source Name",
+ "Unified Name",
+ "Obs. Date",
+ "Detector",
+ "Exposure (ks)",
+ "Clean Exposure (ks)",
+ "R. A.",
+ "Dec.",
+ "XCNTRD_RA",
+ "XCNTRD_DEC",
+ "nH (10^22 cm^-2)",
+ "redshift",
+ "E(z)",
+ "T_ref (keV)",
+ "Z_ref (solar)",
+ "Rmax_SBP (pixel)",
+ "Rmax_Tpro (pixel)",
+ "Rmax_SBP (kpc)",
+ "Rmax_Tpro (kpc)",
+ "NFW_Rmin (kpc)",
+ "Model_SBP",
+ "n01",
+ "beta1",
+ "rc1",
+ "rc1_kpc",
+ "n02",
+ "beta2",
+ "rc2",
+ "rc2_kpc",
+ "bkg",
+ "R200 (kpc)",
+ "R200_err_lower (1sigma)",
+ "R200_err_upper (1sigma)",
+ "M200 (M_sun)",
+ "M200_err_lower (1sigma)",
+ "M200_err_upper (1sigma)",
+ "L200 (erg/s)",
+ "L200_err (1sigma)",
+ "M_gas200 (M_sun)",
+ "M_gas200_err_lower (1sigma)",
+ "M_gas200_err_upper (1sigma)",
+ "F_gas200",
+ "F_gas200_err_lower (1sigma)",
+ "F_gas200_err_upper (1sigma)",
+ "R500 (kpc)",
+ "R500_err_lower (1sigma)",
+ "R500_err_upper (1sigma)",
+ "M500 (M_sun)",
+ "M500_err_lower (1sigma)",
+ "M500_err_upper (1sigma)",
+ "L500 (erg/s)",
+ "L500_err (1sigma)",
+ "M_gas500 (M_sun)",
+ "M_gas500_err_lower (1sigma)",
+ "M_gas500_err_upper (1sigma)",
+ "F_gas500",
+ "F_gas500_err_lower (1sigma)",
+ "F_gas500_err_upper (1sigma)",
+ "R1500",
+ "R1500_err_lower",
+ "R1500_err_upper",
+ "M1500",
+ "M1500_err_lower",
+ "M1500_err_upper",
+ "L1500",
+ "L1500_err",
+ "M_gas1500",
+ "M_gas1500_err_lower",
+ "M_gas1500_err_upper",
+ "F_gas1500",
+ "F_gas1500_err_lower",
+ "F_gas1500_err_upper",
+ "R2500",
+ "R2500_err_lower",
+ "R2500_err_upper",
+ "M2500",
+ "M2500_err_lower",
+ "M2500_err_upper",
+ "L2500",
+ "L2500_err",
+ "M_gas2500",
+ "M_gas2500_err_lower",
+ "M_gas2500_err_upper",
+ "F_gas2500",
+ "F_gas2500_err_lower",
+ "F_gas2500_err_upper",
+ "T(0.1-0.5 R500)",
+ "T_err(0.1-0.5 R500)",
+ "T_err_l(0.1-0.5 R500)",
+ "T_err_u(0.1-0.5 R500)",
+ "Z(0.1-0.5 R500)",
+ "Z_err(0.1-0.5 R500)",
+ "Z_err_l(0.1-0.5 R500)",
+ "Z_err_u(0.1-0.5 R500)",
+ "T(0.2-0.5 R500)",
+ "T_err(0.2-0.5 R500)",
+ "T_err_l(0.2-0.5 R500)",
+ "T_err_u(0.2-0.5 R500)",
+ "Z(0.2-0.5 R500)",
+ "Z_err(0.2-0.5 R500)",
+ "Z_err_l(0.2-0.5 R500)",
+ "Z_err_u(0.2-0.5 R500)",
+ "F_gas(R2500-R500)",
+ "F_gas_err_l(R2500-R500)",
+ "F_gas_err_u(R2500-R500)",
+ "R_cool (kpc)",
+ "Cooling_time (Gyr)",
+ "Cool_core",
+ "Feature",
+ "NOTE"
+])
+## header }}}
+
+# write data {{{
+for item in data:
+ outfile.writerow([
+ item["Obs. ID"],
+ item["Source Name"],
+ item["Unified Name"],
+ item["Obs. Date"],
+ item["Detector"],
+ item["Exposure (ks)"],
+ item["Clean Exposure (ks)"],
+ item["R. A."],
+ item["Dec."],
+ item["XCNTRD_RA"],
+ item["XCNTRD_DEC"],
+ item["nH (10^22 cm^-2)"],
+ item["redshift"],
+ item["E(z)"],
+ item["T_ref (keV)"],
+ item["Z_ref (solar)"],
+ item["Rmax_SBP (pixel)"],
+ item["Rmax_Tpro (pixel)"],
+ item["Rmax_SBP (kpc)"],
+ item["Rmax_Tpro (kpc)"],
+ item["NFW_Rmin (kpc)"],
+ item["Model_SBP"],
+ item["n01"],
+ item["beta1"],
+ item["rc1"],
+ item["rc1_kpc"],
+ item["n02"],
+ item["beta2"],
+ item["rc2"],
+ item["rc2_kpc"],
+ item["bkg"],
+ item["R200 (kpc)"],
+ item["R200_err_lower (1sigma)"],
+ item["R200_err_upper (1sigma)"],
+ item["M200 (M_sun)"],
+ item["M200_err_lower (1sigma)"],
+ item["M200_err_upper (1sigma)"],
+ item["L200 (erg/s)"],
+ item["L200_err (1sigma)"],
+ item["M_gas200 (M_sun)"],
+ item["M_gas200_err_lower (1sigma)"],
+ item["M_gas200_err_upper (1sigma)"],
+ item["F_gas200"],
+ item["F_gas200_err_lower (1sigma)"],
+ item["F_gas200_err_upper (1sigma)"],
+ item["R500 (kpc)"],
+ item["R500_err_lower (1sigma)"],
+ item["R500_err_upper (1sigma)"],
+ item["M500 (M_sun)"],
+ item["M500_err_lower (1sigma)"],
+ item["M500_err_upper (1sigma)"],
+ item["L500 (erg/s)"],
+ item["L500_err (1sigma)"],
+ item["M_gas500 (M_sun)"],
+ item["M_gas500_err_lower (1sigma)"],
+ item["M_gas500_err_upper (1sigma)"],
+ item["F_gas500"],
+ item["F_gas500_err_lower (1sigma)"],
+ item["F_gas500_err_upper (1sigma)"],
+ item["R1500"],
+ item["R1500_err_lower"],
+ item["R1500_err_upper"],
+ item["M1500"],
+ item["M1500_err_lower"],
+ item["M1500_err_upper"],
+ item["L1500"],
+ item["L1500_err"],
+ item["M_gas1500"],
+ item["M_gas1500_err_lower"],
+ item["M_gas1500_err_upper"],
+ item["F_gas1500"],
+ item["F_gas1500_err_lower"],
+ item["F_gas1500_err_upper"],
+ item["R2500"],
+ item["R2500_err_lower"],
+ item["R2500_err_upper"],
+ item["M2500"],
+ item["M2500_err_lower"],
+ item["M2500_err_upper"],
+ item["L2500"],
+ item["L2500_err"],
+ item["M_gas2500"],
+ item["M_gas2500_err_lower"],
+ item["M_gas2500_err_upper"],
+ item["F_gas2500"],
+ item["F_gas2500_err_lower"],
+ item["F_gas2500_err_upper"],
+ item["T(0.1-0.5 R500)"],
+ item["T_err(0.1-0.5 R500)"],
+ item["T_err_l(0.1-0.5 R500)"],
+ item["T_err_u(0.1-0.5 R500)"],
+ item["Z(0.1-0.5 R500)"],
+ item["Z_err(0.1-0.5 R500)"],
+ item["Z_err_l(0.1-0.5 R500)"],
+ item["Z_err_u(0.1-0.5 R500)"],
+ item["T(0.2-0.5 R500)"],
+ item["T_err(0.2-0.5 R500)"],
+ item["T_err_l(0.2-0.5 R500)"],
+ item["T_err_u(0.2-0.5 R500)"],
+ item["Z(0.2-0.5 R500)"],
+ item["Z_err(0.2-0.5 R500)"],
+ item["Z_err_l(0.2-0.5 R500)"],
+ item["Z_err_u(0.2-0.5 R500)"],
+ item["F_gas(R2500-R500)"],
+ item["F_gas_err_l(R2500-R500)"],
+ item["F_gas_err_u(R2500-R500)"],
+ item["R_cool (kpc)"],
+ item["Cooling_time (Gyr)"],
+ item["Cool_core"],
+ item["Feature"],
+ item["NOTE"]
+ ])
+## write data }}}
+
+## EOF
+
diff --git a/scripts/chandra_pb_flux.sh b/scripts/chandra_pb_flux.sh
new file mode 100755
index 0000000..d6860f8
--- /dev/null
+++ b/scripts/chandra_pb_flux.sh
@@ -0,0 +1,43 @@
+#!/bin/sh
+#
+# chandra particle background
+# 9.5-12.0 keV (channel: 651-822)
+# PI = [ energy(eV) / 14.6 eV + 1 ]
+#
+# LIweitiaNux <liweitianux@gmail.com>
+# July 30, 2012
+#
+# ChangeLog:
+# v1.1: August 2, 2012
+# fix bugs with scientific notation in `bc'
+#
+
+if [ $# -eq 0 ]; then
+ echo "usage:"
+ echo " `basename $0` <spec> ..."
+ exit 1
+fi
+
+## energy range: 9.5 -- 12.0 keV
+CH_LOW=651
+CH_HI=822
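+# worked example of the PI relation above (inverted):
+#   channel 651 ~ (651 - 1) * 14.6 eV ~  9.5 keV
+#   channel 822 ~ (822 - 1) * 14.6 eV ~ 12.0 keV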
+
+echo "CHANNEL: $CH_LOW -- $CH_HI"
+
+while ! [ -z $1 ]; do
+ f=$1
+ shift
+ echo "FILE: $f"
+ punlearn dmstat
+ COUNTS=`dmstat "$f[channel=${CH_LOW}:${CH_HI}][cols counts]" | grep 'sum:' | awk '{ print $2 }'`
+ punlearn dmkeypar
+ EXPTIME=`dmkeypar $f EXPOSURE echo=yes`
+ BACK=`dmkeypar $f BACKSCAL echo=yes`
+ # fix `scientific notation' bug for `bc'
+ EXPTIME_B=`echo ${EXPTIME} | sed 's/[eE]/\*10\^/' | sed 's/+//'`
+ BACK_B=`echo "( ${BACK} )" | sed 's/[eE]/\*10\^/' | sed 's/+//'`
+ PB_FLUX=`echo "scale = 16; ${COUNTS} / ${EXPTIME_B} / ${BACK_B}" | bc -l`
+ echo " counts / exptime / backscal: ${COUNTS} / ${EXPTIME} / ${BACK}"
+ echo " ${PB_FLUX}"
+done
+
diff --git a/scripts/chandra_update_xcentroid.sh b/scripts/chandra_update_xcentroid.sh
new file mode 100755
index 0000000..3f2a907
--- /dev/null
+++ b/scripts/chandra_update_xcentroid.sh
@@ -0,0 +1,212 @@
+#!/bin/sh
+#
+unalias -a
+export LC_COLLATE=C
+###########################################################
+## based on `ciao_expcorr_sbp.sh' ##
+## get `xcentroid' from region `sbprofile.reg' ##
+## convert from physical coords to WCS coords ##
+## add/update xcentroid WCS to info.json ##
+## ##
+## LIweitiaNux <liweitianux@gmail.com> ##
+## 2013/05/29 ##
+###########################################################
+
+###########################################################
+## ChangeLogs:
+## v1.0, 2013/05/29, LIweitiaNux
+###########################################################
+
+## about, used in `usage' {{{
+VERSION="v1.0"
+UPDATE="2013-05-29"
+## about }}}
+
+## error code {{{
+ERR_USG=1
+ERR_DIR=11
+ERR_EVT=12
+ERR_BKG=13
+ERR_REG=14
+ERR_INFO=15
+ERR_ASOL=21
+ERR_BPIX=22
+ERR_PBK=23
+ERR_MSK=24
+ERR_BKGTY=31
+ERR_SPEC=32
+ERR_DET=41
+ERR_ENG=42
+ERR_CIAO=100
+## error code }}}
+
+## usage, help {{{
+case "$1" in
+ -[hH]*|--[hH]*)
+ printf "usage:\n"
+ printf " `basename $0` evt=<evt_file> reg=<sbp_reg> basedir=<base_dir> info=<INFO.json> update=<yes|no>\n"
+ printf "\nversion:\n"
+ printf "${VERSION}, ${UPDATE}\n"
+ exit ${ERR_USG}
+ ;;
+esac
+## usage, help }}}
+
+## default parameters {{{
+# default `event file' which used to match `blanksky' files
+#DFT_EVT="_NOT_EXIST_"
+DFT_EVT="`ls evt2*_clean.fits 2> /dev/null`"
+# default dir which contains `asols, asol.lis, ...' files
+# DFT_BASEDIR="_NOT_EXIST_"
+DFT_BASEDIR=".."
+# default `radial region file' to extract surface brightness
+#DFT_SBP_REG="_NOT_EXIST_"
+DFT_SBP_REG="sbprofile.reg"
+
+## howto find files in `basedir'
+# default `asol.lis pattern'
+DFT_ASOLIS_PAT="acis*asol?.lis"
+# default INFO.json pattern
+DFT_INFO_PAT="*_INFO.json"
+## default parameters }}}
+
+## functions {{{
+# process commandline arguments
+# cmdline arg format: `KEY=VALUE'
+getopt_keyval() {
+ until [ -z "$1" ]
+ do
+ key=${1%%=*} # extract key
+ val=${1#*=} # extract value
+ keyval="${key}=\"${val}\""
+ echo "## getopt: eval '${keyval}'"
+ eval ${keyval}
+ shift # shift, process next one
+ done
+}
+## functions }}}
+
+## check CIAO init {{{
+if [ -z "${ASCDS_INSTALL}" ]; then
+ printf "ERROR: CIAO NOT initialized\n"
+ exit ${ERR_CIAO}
+fi
+
+## XXX: heasoft's `pget' etc. tools conflict with some CIAO tools
+printf "set \$PATH to avoid conflicts between HEAsoft and CIAO\n"
+export PATH="${ASCDS_BIN}:${ASCDS_CONTRIB}:${PATH}"
+## check CIAO }}}
+
+## parameters {{{
+# process cmdline args using `getopt_keyval'
+getopt_keyval "$@"
+
+# check given parameters
+# check evt file
+if [ -r "${evt}" ]; then
+ EVT=${evt}
+elif [ -r "${DFT_EVT}" ]; then
+ EVT=${DFT_EVT}
+else
+ read -p "clean evt2 file: " EVT
+ if [ ! -r "${EVT}" ]; then
+ printf "ERROR: cannot access given \`${EVT}' evt file\n"
+ exit ${ERR_EVT}
+ fi
+fi
+printf "## use evt file: \`${EVT}'\n" | ${TOLOG}
+# check given region file(s)
+if [ -r "${reg}" ]; then
+ SBP_REG="${reg}"
+elif [ -r "${DFT_SBP_REG}" ]; then
+ SBP_REG=${DFT_SBP_REG}
+else
+ read -p "> surface brighness radial region file: " SBP_REG
+ if [ ! -r "${SBP_REG}" ]; then
+ printf "ERROR: cannot access given \`${SBP_REG}' region file\n"
+ exit ${ERR_REG}
+ fi
+fi
+printf "## use reg file(s): \`${SBP_REG}'\n" | ${TOLOG}
+# check given dir
+if [ -d "${basedir}" ]; then
+ BASEDIR=${basedir}
+elif [ -d "${DFT_BASEDIR}" ]; then
+ BASEDIR=${DFT_BASEDIR}
+else
+ read -p "> basedir (contains asol files): " BASEDIR
+ if [ ! -d "${BASEDIR}" ]; then
+ printf "ERROR: given \`${BASEDIR}' NOT a directory\n"
+ exit ${ERR_DIR}
+ fi
+fi
+# remove the trailing '/'
+BASEDIR=`echo ${BASEDIR} | sed 's/\/*$//'`
+printf "## use basedir: \`${BASEDIR}'\n" | ${TOLOG}
+# check INFO.json file
+if [ ! -z "${info}" ] && [ -r "${BASEDIR}/${info}" ]; then
+ INFO_JSON="${info}"
+elif [ "`ls ${BASEDIR}/${DFT_INFO_PAT} | wc -l`" -eq 1 ]; then
+ INFO_JSON=`( cd ${BASEDIR} && ls ${DFT_INFO_PAT} )`
+else
+ read -p "> info json file: " INFO_JSON
+ if ! [ -r "${BASEDIR}/${INFO_JSON}" ]; then
+ printf "ERROR: cannot access given \`${BASEDIR}/${INFO_JSON}' file\n"
+ exit ${ERR_INFO}
+ fi
+fi
+INFO_JSON=`readlink -f ${BASEDIR}/${INFO_JSON}`
+printf "## use info json file: \`${INFO_JSON}'\n"
+# update flag: whether to update xcentroid in the info.json file
+if [ ! -z "${update}" ]; then
+ case "${update}" in
+ [nN][oO]|[fF]*)
+ F_UPDATE="NO"
+ ;;
+ *)
+ F_UPDATE="YES"
+ ;;
+ esac
+else
+ F_UPDATE="YES"
+fi
+## parameters }}}
+
+## main process {{{
+# asolis
+ASOLIS=`( cd ${BASEDIR} && ls ${DFT_ASOLIS_PAT} 2> /dev/null )`
+
+# get (x,y) from sbp region
+printf "get (x,y) from ${SBP_REG}\n"
+X=`grep -iE '(pie|annulus)' ${SBP_REG} | head -n 1 | awk -F',' '{ print $1 }' | tr -d 'a-zA-Z() '`
+Y=`grep -iE '(pie|annulus)' ${SBP_REG} | head -n 1 | awk -F',' '{ print $2 }' | tr -d 'a-zA-Z() '`
+
+# dmcoords to convert (x,y) to (ra,dec)
+printf "\`dmcoords' to convert (x,y) to (ra,dec) ...\n"
+punlearn dmcoords
+dmcoords infile="${EVT}" asolfile="@${BASEDIR}/${ASOLIS}" option=sky x=${X} y=${Y}
+RA=`pget dmcoords ra`
+DEC=`pget dmcoords dec`
+
+printf "## (x,y): ($X,$Y)\n"
+printf "## (ra,dec): ($RA,$DEC)\n"
+
+if [ "${F_UPDATE}" = "YES" ]; then
+ cp -f ${INFO_JSON} ${INFO_JSON}_bak
+ printf "update xcentroid for info.json ...\n"
+ if grep -qE 'XCNTRD_(RA|DEC)' ${INFO_JSON}; then
+ printf "update ...\n"
+ sed -i'' "s/XCNTRD_RA.*$/XCNTRD_RA\":\ \"${RA}\",/" ${INFO_JSON}
+ sed -i'' "s/XCNTRD_DEC.*$/XCNTRD_DEC\":\ \"${DEC}\",/" ${INFO_JSON}
+ else
+ printf "add ...\n"
+ sed -i'' "/\"Dec\.\"/ a\
+\ \ \ \ \"XCNTRD_DEC\": \"${DEC}\"," ${INFO_JSON}
+ sed -i'' "/\"Dec\.\"/ a\
+\ \ \ \ \"XCNTRD_RA\": \"${RA}\"," ${INFO_JSON}
+ fi
+fi
+## main }}}
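+# After an update, the info.json is expected to contain lines of the form
+#   "XCNTRD_RA": "<ra from dmcoords>",
+#   "XCNTRD_DEC": "<dec from dmcoords>",
+# (the exact coordinate format is whatever `pget dmcoords ra/dec' returns)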
+
+exit 0
+
diff --git a/scripts/chandra_xcentroid.sh b/scripts/chandra_xcentroid.sh
new file mode 100755
index 0000000..14be6a5
--- /dev/null
+++ b/scripts/chandra_xcentroid.sh
@@ -0,0 +1,317 @@
+#!/bin/sh
+#
+###########################################################
+## get the coord of the X-ray centroid in given evt file ##
+## 1) given `evt_clean' file ##
+## 2) `aconvolve' and then `dmstat' ##
+## 3) `dmcoords' convert `sky x, y' to `ra, dec' ##
+## ##
+## NOTES: ##
+## support ACIS-I(chip: 0-3) and ACIS-S(chip: 7) ##
+## determine by check `DETNAM' for chip number ##
+## if `DETNAM' has `0123', then `ACIS-I' ##
+## if `DETNAM' has `7', then `ACIS-S' ##
+## ##
+## LIweitiaNux <liweitianux@gmail.com> ##
+## November 8, 2012 ##
+###########################################################
+
+###########################################################
+## ChangeLogs:
+## v2.1, 2013/10/12, LIweitiaNux
+##   add support for extracting center coordinates from 'point' and 'circle' regions
+## v2.0, 2013/01/22, LIweitiaNux
+## aconvolve switch
+## add iterations for better confidence
+## v1.1, 2012/11/08, LIweitiaNux
+## get x-ray peak coord from given region file
+###########################################################
+
+## about, used in `usage' {{{
+VERSION="v2.1"
+UPDATE="2013-10-12"
+## about }}}
+
+## error code {{{
+ERR_USG=1
+ERR_DIR=11
+ERR_EVT=12
+ERR_BKG=13
+ERR_REG=14
+ERR_ASOL=21
+ERR_BPIX=22
+ERR_PBK=23
+ERR_MSK=24
+ERR_BKGTY=31
+ERR_SPEC=32
+ERR_DET=41
+## error code }}}
+
+## usage, help {{{
+case "$1" in
+ -[hH]*|--[hH]*)
+ printf "usage:\n"
+ printf " `basename $0` evt=<evt_cl> reg=<reg> [ asol=<asol> chip=<chip> ] [ conv=yes|No ]\n"
+ printf "\nversion:\n"
+ printf "${VERSION}, ${UPDATE}\n"
+ exit ${ERR_USG}
+ ;;
+esac
+## usage, help }}}
+
+## check CIAO init & resolve conflict with HEAsoft {{{
+if [ -z "${ASCDS_INSTALL}" ]; then
+ printf "ERROR: CIAO NOT initialized\n"
+ exit ${ERR_CIAO}
+fi
+
+## XXX: heasoft's `pget' etc. tools conflict with some CIAO tools
+printf "set \$PATH to avoid conflicts between HEAsoft and CIAO\n"
+export PATH="${ASCDS_BIN}:${ASCDS_CONTRIB}:${PATH}"
+printf "## PATH: ${PATH}\n"
+
+## ciao & heasoft }}}
+
+## default parameters {{{
+# critical offset (in pixel)
+OFFSET_CRIC=10
+# energy range: 700-2000 eV
+E_RANGE="700:2000"
+# default `evt clean file'
+DFT_EVT="`ls evt*clean.fits *clean*evt*.fits 2> /dev/null | head -n 1`"
+# default `asol file'
+DFT_ASOL="`ls pcadf*_asol1.fits 2> /dev/null | head -n 1`"
+# default region file
+DFT_REG="`ls sbprofile.reg rspec.reg 2> /dev/null | head -n 1`"
+# iteration step, ~150 arcsec, ~50 arcsec
+R_STP1=300
+R_STP2=100
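+# (1 ACIS sky pixel ~ 0.492 arcsec, so 300 pix ~ 148" and 100 pix ~ 49")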
+## default parameters }}}
+
+## functions {{{
+# process commandline arguments
+# cmdline arg format: `KEY=VALUE'
+getopt_keyval() {
+ until [ -z "$1" ]
+ do
+ key=${1%%=*} # extract key
+ val=${1#*=} # extract value
+ keyval="${key}=\"${val}\""
+ echo "## getopt: eval '${keyval}'"
+ eval ${keyval}
+ shift # shift, process next one
+ done
+}
+## functions }}}
+
+## parameters {{{
+# process cmdline args using `getopt_keyval'
+getopt_keyval "$@"
+
+## check given parameters
+# check evt file
+if [ -r "${evt}" ]; then
+ EVT=${evt}
+elif [ -r "${DFT_EVT}" ]; then
+ EVT=${DFT_EVT}
+else
+ read -p "evt clean file: " EVT
+ if ! [ -r "${EVT}" ]; then
+ printf "ERROR: cannot access given \`${EVT}' evt file\n"
+ exit ${ERR_EVT}
+ fi
+fi
+printf "## use evt file: \`${EVT}'\n"
+
+# asol
+if [ ! -z "${asol}" ]; then
+ ASOL=${asol}
+elif [ -r "${DFT_ASOL}" ]; then
+ ASOL=${DFT_ASOL}
+else
+ # read -p "asol file: " ASOL
+ # if ! [ -r "${ASOL}" ]; then
+ # printf "ERROR: cannot access given \`${ASOL}' asol file\n"
+ # exit ${ERR_ASOL}
+ # fi
+ ASOL="NO"
+ printf "## asol file not supplied !\n"
+fi
+printf "## use asol file: \`${ASOL}'\n"
+
+# region file (optional)
+if [ -r "${reg}" ]; then
+ REG=${reg}
+elif [ -r "${DFT_REG}" ]; then
+ REG=${DFT_REG}
+else
+ read -p "region file: " REG
+ if [ ! -r "${REG}" ]; then
+ printf "ERROR: cannot access given \`${REG}' region file\n"
+ exit ${ERR_REG}
+ fi
+fi
+printf "## use reg file: \`${REG}'\n"
+# get the centroid from the given region file
+CNTRD_X2=`grep -iE '(point|circle|pie|annulus)' ${REG} | head -n 1 | tr -d 'a-zA-Z()' | awk -F',' '{ print $1 }'`
+CNTRD_Y2=`grep -iE '(point|circle|pie|annulus)' ${REG} | head -n 1 | tr -d 'a-zA-Z()' | awk -F',' '{ print $2 }'`
+printf "## center from given regfile: (${CNTRD_X2},${CNTRD_Y2})\n"
+
+
+# convolve (optional)
+if [ -z "${conv}" ]; then
+ CONV="NO"
+else
+ case "${conv}" in
+ [yY]*)
+ CONV="YES"
+ printf "## apply \`aconvolve' !\n"
+ ;;
+ *)
+ CONV="NO"
+ ;;
+ esac
+fi
+
+# determine chip
+if [ ! -z "${chip}" ]; then
+ CHIP="${chip}"
+ printf "## use chip: \`${CHIP}'\n"
+else
+ # determine chip by ACIS type
+ punlearn dmkeypar
+ DETNAM=`dmkeypar ${EVT} DETNAM echo=yes`
+ if echo ${DETNAM} | grep -q 'ACIS-0123'; then
+ printf "## \`DETNAM' (${DETNAM}) has chips 0123\n"
+ printf "## ACIS-I\n"
+ ACIS_TYPE="ACIS-I"
+ CHIP="0:3"
+ elif echo ${DETNAM} | grep -q 'ACIS-[0-6]*7'; then
+ printf "## \`DETNAM' (${DETNAM}) has chip 7\n"
+ printf "## ACIS-S\n"
+ ACIS_TYPE="ACIS-S"
+ CHIP="7"
+ else
+ printf "ERROR: unknown detector type: ${DETNAM}\n"
+ exit ${ERR_DET}
+ fi
+fi
+## parameters }}}
+
+## dmstat.par {{{
+## 2013/02/07, LIweitiaNux
+## for some unknown reason, a wrong/stale parameter file can cause dmstat to fail to run
+printf "remove \`dmstat.par' ...\n"
+DMSTAT_PAR="$HOME/cxcds_param4/dmstat.par"
+[ -f "${DMSTAT_PAR}" ] && rm -fv ${DMSTAT_PAR}
+## dmstat.par }}}
+
+## main part {{{
+# generate `skyfov'
+SKYFOV="_skyfov.fits"
+printf "generate skyfov: \`${SKYFOV}' ...\n"
+punlearn skyfov
+skyfov infile="${EVT}" outfile="${SKYFOV}" clobber=yes
+
+# generate image
+IMG="img_c`echo ${CHIP} | tr ':' '-'`_e`echo ${E_RANGE} | tr ':' '-'`.fits"
+printf "generate image: \`${IMG}' ...\n"
+punlearn dmcopy
+dmcopy infile="${EVT}[sky=region(${SKYFOV}[ccd_id=${CHIP}])][energy=${E_RANGE}][bin sky=::1]" outfile="${IMG}" clobber=yes
+
+# aconvolve
+if [ "${CONV}" = "YES" ]; then
+ IMG_ACONV="${IMG%.fits}_aconv.fits"
+ KERNELSPEC="lib:gaus(2,5,1,10,10)"
+ METHOD="fft"
+ printf "\`aconvolve' to smooth img: \`${IMG_ACONV}' ...\n"
+ printf "## aconvolve: kernelspec=\"${KERNELSPEC}\" method=\"${METHOD}\"\n"
+ punlearn aconvolve
+ aconvolve infile="${IMG}" outfile="${IMG_ACONV}" kernelspec="${KERNELSPEC}" method="${METHOD}" clobber=yes
+else
+ IMG_ACONV=${IMG}
+fi
+
+# tmp analysis region
+TMP_REG="_tmp_centroid.reg"
+[ -r "${TMP_REG}" ] && rm -f ${TMP_REG}
+echo "circle(${CNTRD_X2},${CNTRD_Y2},${R_STP1})" > ${TMP_REG}
+
+# dmstat to find the centroid
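+# NOTE: the centroid is refined iteratively: run `dmstat' (centroid=yes)
+# inside a circular aperture, re-center the aperture on the returned
+# centroid, and repeat -- 5 passes with radius R_STP1, then 5 more with
+# the smaller radius R_STP2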
+printf "\`dmstat' to find the centroid ...\n"
+# step1
+printf " region size ${R_STP1}pix: "
+for i in `seq 1 5`; do
+ printf "#$i ... "
+ punlearn dmstat
+ # dmstat infile="${IMG_ACONV}[sky=region(${TMP_REG})]" centroid=yes verbose=1
+ dmstat infile="${IMG_ACONV}[sky=region(${TMP_REG})]" centroid=yes verbose=0
+ CNTRD_X=`pget dmstat out_cntrd_phys | cut -d',' -f1`
+ CNTRD_Y=`pget dmstat out_cntrd_phys | cut -d',' -f2`
+ # printf "\n${CNTRD_X},${CNTRD_Y}\n"
+ echo "circle(${CNTRD_X},${CNTRD_Y},${R_STP1})" > ${TMP_REG}
+done
+printf " done\n"
+echo "circle(${CNTRD_X},${CNTRD_Y},${R_STP2})" > ${TMP_REG}
+# step2
+printf " region size ${R_STP2}pix: "
+for i in `seq 1 5`; do
+ printf "#$i ... "
+ punlearn dmstat
+ dmstat infile="${IMG_ACONV}[sky=region(${TMP_REG})]" centroid=yes verbose=0
+ CNTRD_X=`pget dmstat out_cntrd_phys | cut -d',' -f1`
+ CNTRD_Y=`pget dmstat out_cntrd_phys | cut -d',' -f2`
+ # printf "\n${CNTRD_X},${CNTRD_Y}\n"
+ echo "circle(${CNTRD_X},${CNTRD_Y},${R_STP2})" > ${TMP_REG}
+done
+printf " done\n"
+
+# calc offset vs. given
+OFFSET=`echo "scale=5; sqrt((${CNTRD_X}-${CNTRD_X2})^2 + (${CNTRD_Y}-${CNTRD_Y2})^2)" | bc -l`
+
+# output
+CNTRD_PHY_REG="centroid_phy.reg"
+[ -e "${CNTRD_PHY_REG}" ] && mv -f ${CNTRD_PHY_REG} ${CNTRD_PHY_REG}_bak
+echo "point(${CNTRD_X},${CNTRD_Y})" > ${CNTRD_PHY_REG}
+
+# dmcoords to convert (x,y) to (ra,dec)
+if [ -r "${ASOL}" ]; then
+ printf "\`dmcoords' to convert (x,y) to (ra,dec) ...\n"
+ punlearn dmcoords
+ dmcoords infile="${EVT}" asolfile="${ASOL}" option=sky x=${CNTRD_X} y=${CNTRD_Y}
+ CNTRD_RA=`pget dmcoords ra`
+ CNTRD_DEC=`pget dmcoords dec`
+ CNTRD_WCS_REG="centroid_wcs.reg"
+ [ -e "${CNTRD_WCS_REG}" ] && mv -f ${CNTRD_WCS_REG} ${CNTRD_WCS_REG}_bak
+ echo "point(${CNTRD_RA},${CNTRD_DEC})" > ${CNTRD_WCS_REG}
+ ## from region
+ punlearn dmcoords
+ dmcoords infile="${EVT}" asolfile="${ASOL}" option=sky x=${CNTRD_X2} y=${CNTRD_Y2}
+ CNTRD_RA2=`pget dmcoords ra`
+ CNTRD_DEC2=`pget dmcoords dec`
+fi
+
+printf "\n"
+printf "++++++++++++++++++++++++++++++++++++++++++++\n"
+printf "X-ray centroid coordinates:\n"
+printf "via dmstat:\n"
+printf " (X,Y): (${CNTRD_X},${CNTRD_Y})\n"
+if [ -r "${ASOL}" ]; then
+ printf " (RA,DEC): (${CNTRD_RA},${CNTRD_DEC})\n"
+fi
+printf "via region:\n"
+printf " (X2,Y2): (${CNTRD_X2},${CNTRD_Y2})\n"
+if [ -r "${ASOL}" ]; then
+ printf " (RA2,DEC2): (${CNTRD_RA2},${CNTRD_DEC2})\n"
+fi
+printf "offset (unit pixel):\n"
+printf " offset: ${OFFSET}\n"
+if [ `echo "${OFFSET} > ${OFFSET_CRIC}" | bc -l` -eq 1 ]; then
+ printf "*****************************\n"
+ printf "*** WARNING: large offset ***\n"
+fi
+printf "++++++++++++++++++++++++++++++++++++++++++++\n"
+## main }}}
+
+exit 0
+
diff --git a/scripts/chandra_xpeak_coord.sh b/scripts/chandra_xpeak_coord.sh
new file mode 100755
index 0000000..8b0644a
--- /dev/null
+++ b/scripts/chandra_xpeak_coord.sh
@@ -0,0 +1,222 @@
+#!/bin/sh
+#
+###########################################################
+## get the coord of the X-ray peak in given evt file ##
+## 1) given `evt_clean' file ##
+## 2) `aconvolve' and then `dmstat' ##
+## 3) `dmcoords' convert `sky x, y' to `ra, dec' ##
+## ##
+## NOTES: ##
+## support ACIS-I(chip: 0-3) and ACIS-S(chip: 7) ##
+## determine by check `DETNAM' for chip number ##
+## if `DETNAM' has `0123', then `ACIS-I' ##
+## if `DETNAM' has `7', then `ACIS-S' ##
+## ##
+## LIweitiaNux <liweitianux@gmail.com> ##
+## November 8, 2012 ##
+###########################################################
+
+###########################################################
+## ChangeLogs:
+## v1.1, 2012/11/08, LIweitiaNux
+## get x-ray peak coord from given region file
+###########################################################
+
+## about, used in `usage' {{{
+VERSION="v1.1"
+UPDATE="2012-11-08"
+## about }}}
+
+## error code {{{
+ERR_USG=1
+ERR_DIR=11
+ERR_EVT=12
+ERR_BKG=13
+ERR_REG=14
+ERR_ASOL=21
+ERR_BPIX=22
+ERR_PBK=23
+ERR_MSK=24
+ERR_BKGTY=31
+ERR_SPEC=32
+ERR_DET=41
+## error code }}}
+
+## usage, help {{{
+case "$1" in
+ -[hH]*|--[hH]*)
+ printf "usage:\n"
+ printf " `basename $0` evt=<evt_cl> asol=<asol> [ reg=<reg> chip=<chip> ]\n"
+ printf "\nversion:\n"
+ printf "${VERSION}, ${UPDATE}\n"
+ exit ${ERR_USG}
+ ;;
+esac
+## usage, help }}}
+
+## default parameters {{{
+# default `evt clean file'
+DFT_EVT="`ls evt*clean.fits *clean*evt*.fits 2> /dev/null | head -n 1`"
+# default `asol file'
+DFT_ASOL="`ls ../pcadf*_asol1.fits pcadf*_asol1.fits 2> /dev/null | head -n 1`"
+# default region file
+DFT_REG="`ls sbprofile.reg rspec.reg 2> /dev/null | head -n 1`"
+## default parameters }}}
+
+## functions {{{
+# process commandline arguments
+# cmdline arg format: `KEY=VALUE'
+getopt_keyval() {
+ until [ -z "$1" ]
+ do
+ key=${1%%=*} # extract key
+ val=${1#*=} # extract value
+ keyval="${key}=\"${val}\""
+ echo "## getopt: eval '${keyval}'"
+ eval ${keyval}
+ shift # shift, process next one
+ done
+}
+## functions }}}
+
+## parameters {{{
+# process cmdline args using `getopt_keyval'
+getopt_keyval "$@"
+
+## check given parameters
+# check evt file
+if [ -r "${evt}" ]; then
+ EVT=${evt}
+elif [ -r "${DFT_EVT}" ]; then
+ EVT=${DFT_EVT}
+else
+ read -p "evt clean file: " EVT
+ if ! [ -r "${EVT}" ]; then
+ printf "ERROR: cannot access given \`${EVT}' evt file\n"
+ exit ${ERR_EVT}
+ fi
+fi
+printf "## use evt file: \`${EVT}'\n"
+
+# asol
+if [ ! -z "${asol}" ]; then
+ ASOL=${asol}
+elif [ -r "${DFT_ASOL}" ]; then
+ ASOL=${DFT_ASOL}
+else
+ read -p "asol file: " ASOL
+ if ! [ -r "${ASOL}" ]; then
+ printf "ERROR: cannot access given \`${ASOL}' asol file\n"
+ exit ${ERR_ASOL}
+ fi
+fi
+printf "## use asol file: \`${ASOL}'\n"
+
+# region file (optional)
+if [ ! -z "${reg}" ]; then
+ REG=${reg}
+else
+ REG=${DFT_REG}
+fi
+printf "## use reg file: \`${REG}'\n"
+
+# determine chip
+if [ ! -z "${chip}" ]; then
+ CHIP="${chip}"
+ printf "## use chip: \`${CHIP}'\n"
+else
+ # determine chip by ACIS type
+ punlearn dmkeypar
+ DETNAM=`dmkeypar ${EVT} DETNAM echo=yes`
+ if echo ${DETNAM} | grep -q 'ACIS-0123'; then
+ printf "## \`DETNAM' (${DETNAM}) has chips 0123\n"
+ printf "## ACIS-I\n"
+ ACIS_TYPE="ACIS-I"
+ CHIP="0:3"
+ elif echo ${DETNAM} | grep -q 'ACIS-[0-6]*7'; then
+ printf "## \`DETNAM' (${DETNAM}) has chip 7\n"
+ printf "## ACIS-S\n"
+ ACIS_TYPE="ACIS-S"
+ CHIP="7"
+ else
+ printf "ERROR: unknown detector type: ${DETNAM}\n"
+ exit ${ERR_DET}
+ fi
+fi
+## parameters }}}
+
+## main part {{{
+# generate `skyfov'
+SKYFOV="_skyfov.fits"
+printf "generate skyfov: \`${SKYFOV}' ...\n"
+punlearn skyfov
+skyfov infile="${EVT}" outfile="${SKYFOV}" clobber=yes
+
+# generate image
+# energy range: 500-7000 eV
+E_RANGE="500:7000"
+IMG="img_c`echo ${CHIP} | tr ':' '-'`_e`echo ${E_RANGE} | tr ':' '-'`.fits"
+printf "generate image: \`${IMG}' ...\n"
+punlearn dmcopy
+dmcopy infile="${EVT}[sky=region(${SKYFOV}[ccd_id=${CHIP}])][energy=${E_RANGE}][bin sky=::1]" outfile="${IMG}" clobber=yes
+
+# aconvolve
+IMG_ACONV="${IMG%.fits}_aconv.fits"
+KERNELSPEC="lib:gaus(2,5,1,10,10)"
+METHOD="fft"
+printf "\`aconvolve' to smooth img: \`${IMG_ACONV}' ...\n"
+printf "## aconvolve: kernelspec=\"${KERNELSPEC}\" method=\"${METHOD}\"\n"
+punlearn aconvolve
+aconvolve infile="${IMG}" outfile="${IMG_ACONV}" kernelspec="${KERNELSPEC}" method="${METHOD}" clobber=yes
+
+# dmstat
+printf "\`dmstat' to analyze the img ...\n"
+punlearn dmstat
+dmstat infile="${IMG_ACONV}"
+MAX_X=`pget dmstat out_max_loc | cut -d',' -f1`
+MAX_Y=`pget dmstat out_max_loc | cut -d',' -f2`
+# dmcoords to convert (x,y) to (ra,dec)
+printf "\`dmcoords' to convert (x,y) to (ra,dec) ...\n"
+punlearn dmcoords
+dmcoords infile="${EVT}" asolfile="${ASOL}" option=sky x=${MAX_X} y=${MAX_Y}
+MAX_RA=`pget dmcoords ra`
+MAX_DEC=`pget dmcoords dec`
+
+# output results
+PHY_REG="peak_phy.reg"
+WCS_REG="peak_wcs.reg"
+[ -e "${PHY_REG}" ] && mv -f ${PHY_REG} ${PHY_REG}_bak
+[ -e "${WCS_REG}" ] && mv -f ${WCS_REG} ${WCS_REG}_bak
+echo "point(${MAX_X},${MAX_Y})" > ${PHY_REG}
+echo "point(${MAX_RA},${MAX_DEC})" > ${WCS_REG}
+
+printf "\n"
+printf "++++++++++++++++++++++++++++++++++++++++++++\n"
+printf "X-ray peak coordinates:\n"
+printf "via dmstat:\n"
+printf " (X,Y): (${MAX_X},${MAX_Y})\n"
+printf " (RA,DEC): (${MAX_RA},${MAX_DEC})\n"
+
+## region file based {{{
+if [ -r "${REG}" ]; then
+ MAX_X2=`grep -iE '(pie|annulus)' ${REG} | head -n 1 | tr -d 'a-zA-Z()' | awk -F',' '{ print $1 }'`
+ MAX_Y2=`grep -iE '(pie|annulus)' ${REG} | head -n 1 | tr -d 'a-zA-Z()' | awk -F',' '{ print $2 }'`
+ punlearn dmcoords
+ dmcoords infile="${EVT}" asolfile="${ASOL}" option=sky x=${MAX_X2} y=${MAX_Y2}
+ MAX_RA2=`pget dmcoords ra`
+ MAX_DEC2=`pget dmcoords dec`
+ # calc offset
+ OFFSET=`echo "scale=5; sqrt((${MAX_X}-${MAX_X2})^2 + (${MAX_Y}-${MAX_Y2})^2)" | bc -l`
+
+ printf "via region:\n"
+ printf " (X2,Y2): (${MAX_X2},${MAX_Y2})\n"
+ printf " (RA2,DEC2): (${MAX_RA2},${MAX_DEC2})\n"
+ printf "offset (unit pixel):\n"
+ printf " offset: ${OFFSET}\n"
+fi
+## region file }}}
+printf "++++++++++++++++++++++++++++++++++++++++++++\n"
+## main }}}
+
+exit 0
+
diff --git a/scripts/ciao_bkg_spectra_v4.sh b/scripts/ciao_bkg_spectra_v4.sh
new file mode 100755
index 0000000..bc40151
--- /dev/null
+++ b/scripts/ciao_bkg_spectra_v4.sh
@@ -0,0 +1,416 @@
+#!/bin/sh -
+#
+trap date INT
+unalias -a
+export GREP_OPTIONS=""
+export LC_COLLATE=C
+###########################################################
+## extract background spectra from src and blanksky ##
+## renormalization the blank spectrum ##
+## ##
+## Ref: Chandra spectrum analysis ##
+## http://cxc.harvard.edu/ciao/threads/extended/ ##
+## Ref: specextract ##
+## http://cxc.harvard.edu/ciao/ahelp/specextract.html ##
+## Ref: CIAO v4.4 region bugs ##
+## http://cxc.harvard.edu/ciao/bugs/regions.html#bug-12187
+## ##
+## LIweitiaNux, July 24, 2012 ##
+###########################################################
+
+###########################################################
+# ChangeLogs:
+# v3, 2012/08/09
+# fix `scientific notation' for `bc'
+# change `spec group' method to `min 15'
+# v4, 2012/08/13
+# add `clobber=yes'
+# improve error code
+#   improve cmdline arguments
+#   provide a flexible way to pass parameters
+#   (through cmdline, similar to CIAO,
+#    and default filename match patterns)
+# add simple `logging' function
+###########################################################
+
+## about, used in `usage' {{{
+VERSION="v4"
+UPDATE="2012-08-14"
+## about }}}
+
+## usage, help {{{
+case "$1" in
+ -[hH]*|--[hH]*)
+ printf "usage:\n"
+ printf " `basename $0` evt=<evt2_clean> reg=<reglist> blank=<blanksky_evt> basedir=<base_dir> nh=<nH> z=<redshift> [ grpcmd=<grppha_cmd> log=<log_file> ]\n"
+ printf "\nversion:\n"
+ printf "${VERSION}, ${UPDATE}\n"
+ exit ${ERR_USG}
+ ;;
+esac
+## usage, help }}}
+
+## default parameters {{{
+# default `event file' which used to match `blanksky' files
+#DFT_EVT="_NOT_EXIST_"
+DFT_EVT="`ls evt2*_clean.fits`"
+# default `blanksky file'
+#DFT_BLANK="_NOT_EXIST_"
+DFT_BLANK="`ls blanksky*.fits`"
+# default dir which contains `asols, asol.lis, ...' files
+#DFT_BASEDIR="_NOT_EXIST_"
+DFT_BASEDIR=".."
+# default `group command' for `grppha'
+#DFT_GRP_CMD="group 1 128 2 129 256 4 257 512 8 513 1024 16"
+DFT_GRP_CMD="group min 20"
+# default `log file'
+DFT_LOGFILE="bkg_spectra_`date '+%Y%m%d'`.log"
+
+## howto find files in `basedir'
+# default `asol.lis pattern'
+DFT_ASOLIS_PAT="acis*asol?.lis"
+# default `bad pixel filename pattern'
+DFT_BPIX_PAT="acis*repro*bpix?.fits"
+# default `pbk file pattern'
+DFT_PBK_PAT="acis*pbk?.fits"
+# default `msk file pattern'
+DFT_MSK_PAT="acis*msk?.fits"
+## default parameters }}}
+
+## error code {{{
+ERR_USG=1
+ERR_DIR=11
+ERR_EVT=12
+ERR_BKG=13
+ERR_REG=14
+ERR_ASOL=21
+ERR_BPIX=22
+ERR_PBK=23
+ERR_MSK=24
+ERR_BKGTY=31
+ERR_SPEC=32
+## error code }}}
+
+## functions {{{
+# process commandline arguments
+# cmdline arg format: `KEY=VALUE'
+getopt_keyval() {
+ until [ -z "$1" ]
+ do
+ key=${1%%=*} # extract key
+ val=${1#*=} # extract value
+ keyval="${key}=\"${val}\""
+ echo "## getopt: eval '${keyval}'"
+ eval ${keyval}
+ shift # shift, process next one
+ done
+}
+
+## background renormalization (BACKSCAL) {{{
+# renorm background according to particle background
+# energy range: 9.5-12.0 keV (channel: 651-822)
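+# NOTE: a sketch of the renormalization with made-up numbers: if the
+# 9.5-12 keV particle flux (pb_flux) is 2.0e-8 for the source spectrum
+# and 4.0e-8 for the blanksky spectrum, then
+#     BACKSCAL_new = BACKSCAL_old * 4.0e-8 / 2.0e-8 = 2 * BACKSCAL_old
+# i.e. the blanksky BACKSCAL is scaled so that both spectra yield the
+# same 9.5-12 keV particle-background surface flux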
+CH_LOW=651
+CH_HI=822
+pb_flux() {
+ punlearn dmstat
+ COUNTS=`dmstat "$1[channel=${CH_LOW}:${CH_HI}][cols COUNTS]" | grep -i 'sum:' | awk '{ print $2 }'`
+ punlearn dmkeypar
+ EXPTIME=`dmkeypar $1 EXPOSURE echo=yes`
+ BACK=`dmkeypar $1 BACKSCAL echo=yes`
+ # fix `scientific notation' bug for `bc'
+ EXPTIME_B=`echo ${EXPTIME} | sed 's/[eE]/\*10\^/' | sed 's/+//'`
+ BACK_B=`echo "( ${BACK} )" | sed 's/[eE]/\*10\^/' | sed 's/+//'`
+ PB_FLUX=`echo "scale = 16; ${COUNTS} / ${EXPTIME_B} / ${BACK_B}" | bc -l`
+ echo ${PB_FLUX}
+}
+
+bkg_renorm() {
+ # $1: src spectrum, $2: back spectrum
+ PBFLUX_SRC=`pb_flux $1`
+ PBFLUX_BKG=`pb_flux $2`
+ BACK_OLD=`dmkeypar $2 BACKSCAL echo=yes`
+ BACK_OLD_B=`echo "( ${BACK_OLD} )" | sed 's/[eE]/\*10\^/' | sed 's/+//'`
+ BACK_NEW=`echo "scale = 16; ${BACK_OLD_B} * ${PBFLUX_BKG} / ${PBFLUX_SRC}" | bc -l`
+ printf "\`$2': BACKSCAL:\n"
+ printf " ${BACK_OLD} --> ${BACK_NEW}\n"
+ punlearn dmhedit
+ dmhedit infile=$2 filelist=none operation=add \
+ key=BACKSCAL value=${BACK_NEW} comment="old value: ${BACK_OLD}"
+}
+## bkg renorm }}}
+## functions end }}}
+
+## parameters {{{
+# process cmdline args using `getopt_keyval'
+getopt_keyval "$@"
+
+## check log parameters {{{
+if [ ! -z "${log}" ]; then
+ LOGFILE="${log}"
+else
+ LOGFILE=${DFT_LOGFILE}
+fi
+printf "## use logfile: \`${LOGFILE}'\n"
+[ -e "${LOGFILE}" ] && mv -fv ${LOGFILE} ${LOGFILE}_bak
+TOLOG="tee -a ${LOGFILE}"
+echo "process script: `basename $0`" >> ${LOGFILE}
+echo "process date: `date`" >> ${LOGFILE}
+## log }}}
+
+# check given parameters
+# check evt file
+if [ -r "${evt}" ]; then
+ EVT=${evt}
+elif [ -r "${DFT_EVT}" ]; then
+ EVT=${DFT_EVT}
+else
+ read -p "clean evt2 file: " EVT
+ if [ ! -r "${EVT}" ]; then
+ printf "ERROR: cannot access given \`${EVT}' evt file\n"
+ exit ${ERR_EVT}
+ fi
+fi
+printf "## use evt file: \`${EVT}'\n" | ${TOLOG}
+# check given region file(s)
+if [ -z "${reg}" ]; then
+ read -p "> selected local bkg region file: " REGLIST
+else
+ REGLIST="${reg}"
+fi
+REGLIST=`echo ${REGLIST} | tr ',' ' '` # use *space* to separate
+printf "## use reg file(s): \`${REGLIST}'\n" | ${TOLOG}
+# check given blanksky
+if [ -r "${blank}" ]; then
+ BLANK=${blank}
+elif [ -r "${DFT_BLANK}" ]; then
+ BLANK=${DFT_BLANK}
+else
+ read -p "> matched blanksky evtfile: " BLANK
+ if [ ! -r "${BLANK}" ]; then
+ printf "ERROR: cannot acces given \`${BLANK}' blanksky file\n"
+ exit ${ERR_BKG}
+ fi
+fi
+# check given nH
+if [ -z "${nh}" ]; then
+ read -p "> value of nH: " N_H
+else
+ N_H=${nh}
+fi
+printf "## use nH: ${N_H}\n" | ${TOLOG}
+# check given redshift
+if [ -z "${z}" ]; then
+ read -p "> value of redshift: " REDSHIFT
+else
+ REDSHIFT=${z}
+fi
+printf "## use redshift: ${REDSHIFT}\n" | ${TOLOG}
+# check given dir
+if [ -d "${basedir}" ]; then
+ BASEDIR=${basedir}
+elif [ -d "${DFT_BASEDIR}" ]; then
+ BASEDIR=${DFT_BASEDIR}
+else
+ read -p "> basedir (contains asol files): " BASEDIR
+ if [ ! -d "${BASEDIR}" ]; then
+ printf "ERROR: given \`${BASEDIR}' NOT a directory\n"
+ exit ${ERR_DIR}
+ fi
+fi
+# remove the trailing '/'
+BASEDIR=`echo ${BASEDIR} | sed 's/\/*$//'`
+printf "## use basedir: \`${BASEDIR}'\n" | ${TOLOG}
+# check given `grpcmd'
+if [ ! -z "${grpcmd}" ]; then
+ GRP_CMD="${grpcmd}"
+else
+ GRP_CMD="${DFT_GRP_CMD}"
+fi
+printf "## use grppha cmd: \`${GRP_CMD}'\n" | ${TOLOG}
+## parameters }}}
+
+## check needed files {{{
+# check reg file(s)
+printf "check accessibility of reg file(s) ...\n"
+for reg_f in ${REGLIST}; do
+ if [ ! -r "${reg_f}" ]; then
+ printf "ERROR: file \`${reg_f}' NOT accessiable\n"
+ exit ${ERR_REG}
+ fi
+done
+# check the validity of *pie* regions
+printf "check pie reg validity ...\n"
+INVALID=`cat ${REGLIST} | grep -i 'pie' | awk -F, '{ print $6 }' | tr -d ')' | awk '$1 > 360'`
+if [ "x${INVALID}" != "x" ]; then
+ printf "WARNING: some pie region's END_ANGLE > 360\n" | ${TOLOG}
+ printf " CIAO v4.4 tools may run into trouble\n"
+fi
+
+# check files in `basedir'
+printf "check needed files in basedir \`${BASEDIR}' ...\n"
+# check asolis files
+ASOLIS=`ls -1 ${BASEDIR}/${DFT_ASOLIS_PAT} | head -n 1`
+if [ -z "${ASOLIS}" ]; then
+ printf "ERROR: cannot find \"${DFT_ASOLIS_PAT}\" in dir \`${BASEDIR}'\n"
+ exit ${ERR_ASOL}
+fi
+printf "## use asolis: \`${ASOLIS}'\n" | ${TOLOG}
+# check badpixel file
+BPIX=`ls -1 ${BASEDIR}/${DFT_BPIX_PAT} | head -n 1`
+if [ -z "${BPIX}" ]; then
+ printf "ERROR: cannot find \"${DFT_BPIX_PAT}\" in dir \`${BASEDIR}'\n"
+ exit ${ERR_BPIX}
+fi
+printf "## use badpixel: \`${BPIX}'\n" | ${TOLOG}
+# check pbk file
+PBK=`ls -1 ${BASEDIR}/${DFT_PBK_PAT} | head -n 1`
+if [ -z "${PBK}" ]; then
+ printf "ERROR: cannot find \"${DFT_PBK_PAT}\" in dir \`${BASEDIR}'\n"
+ exit ${ERR_PBK}
+fi
+printf "## use pbk: \`${PBK}'\n" | ${TOLOG}
+# check msk file
+MSK=`ls -1 ${BASEDIR}/${DFT_MSK_PAT} | head -n 1`
+if [ -z "${MSK}" ]; then
+ printf "ERROR: cannot find \"${DFT_MSK_PAT}\" in dir \`${BASEDIR}'\n"
+ exit ${ERR_MSK}
+fi
+printf "## use msk: \`${MSK}'\n" | ${TOLOG}
+## check files }}}
+
+## main part {{{
+## use 'for' loop to process every region file
+for reg_i in ${REGLIST}; do
+ printf "\n==============================\n"
+ printf "PROCESS REGION fle \`${reg_i}' ...\n"
+
+ REG_TMP="_tmp.reg"
+ [ -f "${REG_TMP}" ] && rm -fv ${REG_TMP} # remove tmp files
+ cp -fv ${reg_i} ${REG_TMP}
+ # check the validity of *pie* regions {{{
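+    # NOTE: for example (hypothetical numbers) `pie(4000,4100,10,50,300,370)'
+    # would be rewritten as `pie(4000,4100,10,50,300,10)', since some CIAO
+    # v4.4 tools reject pie regions with END_ANGLE > 360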
+ INVALID=`grep -i 'pie' ${REG_TMP} | awk -F, '{ print $6 }' | tr -d ')' | awk '$1 > 360'`
+ if [ "x${INVALID}" != "x" ]; then
+ printf "WARNING: fix for *pie* region in file \`${reg_i}'\n"
+ cat ${REG_TMP}
+ A_OLD=`echo ${INVALID} | sed 's/\./\\\./'`
+ A_NEW=`echo ${INVALID}-360 | bc -l | sed 's/\./\\\./'`
+ sed -i'' "s/${A_OLD}\ *)/${A_NEW})/" ${REG_TMP}
+ printf " --> "
+ cat ${REG_TMP}
+ fi
+ ## check pie region }}}
+
+ LBKG_PI="${reg_i%.reg}.pi"
+ ## use `specextract' to extract local bkg spectrum {{{
+ # NOTE: set `binarfwmap=2' to save the time for generating `ARF'
+ # I have tested that this bin factor has little impact on the results.
+ # NO background response files
+ # NO background spectrum (generate by self)
+ # NO spectrum grouping (group by self using `grppha')
+ printf "use \`specextract' to generate spectra and response ...\n"
+ punlearn specextract
+ specextract infile="${EVT}[sky=region(${REG_TMP})]" \
+ outroot=${LBKG_PI%.pi} bkgfile="" asp="@${ASOLIS}" \
+ pbkfile="${PBK}" mskfile="${MSK}" badpixfile="${BPIX}" \
+ weight=yes correct=no bkgresp=no \
+ energy="0.3:11.0:0.01" channel="1:1024:1" \
+ combine=no binarfwmap=2 \
+ grouptype=NONE binspec=NONE \
+ verbose=2 clobber=yes
+ # `specextract' }}}
+
+ ## generate the blanksky bkg spectrum {{{
+ printf "generate the blanksky bkg spectrum ...\n"
+ BBKG_PI="blanksky_${LBKG_PI}"
+ punlearn dmextract
+ dmextract infile="${BLANK}[sky=region(${REG_TMP})][bin pi]" \
+ outfile=${BBKG_PI} wmap="[bin det=8]" clobber=yes
+ ## blanksky bkg spectrum }}}
+
+ ## bkg renormalization {{{
+ printf "Renormalize background ...\n"
+ bkg_renorm ${LBKG_PI} ${BBKG_PI}
+ ## bkg renorm }}}
+
+ ## group spectrum {{{
+ printf "group spectrum using \`grppha'\n"
+ LBKG_GRP_PI="${LBKG_PI%.pi}_grp.pi"
+ grppha infile="${LBKG_PI}" outfile="${LBKG_GRP_PI}" \
+ comm="${GRP_CMD} & exit" clobber=yes > /dev/null
+ ## group spectra }}}
+
+ ## generate a script for XSPEC {{{
+ XSPEC_XCM="xspec_${LBKG_PI%.pi}_model.xcm"
+ if [ -e ${XSPEC_XCM} ]; then
+ mv -fv ${XSPEC_XCM} ${XSPEC_XCM}_bak
+ fi
+ cat >> ${XSPEC_XCM} << _EOF_
+## xspec script
+## analysis chandra acis background components
+## xspec model: apec+apec+wabs*(pow+apec)
+##
+## generated by script \``basename $0`'
+## `date`
+## NOTES: needs XSPEC v12.x
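+## NOTES: the model components presumably correspond to:
+##   apec + apec        -- unabsorbed soft foreground (Local Bubble / Galactic halo)
+##   wabs * powerlaw    -- absorbed cosmic X-ray background (unresolved sources)
+##   wabs * apec        -- absorbed thermal emission from the object itself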
+
+# settings
+statistic chi
+#weight churazov
+abund grsa
+query yes
+
+# data
+data ${LBKG_GRP_PI}
+response ${LBKG_PI%.pi}.wrmf
+arf ${LBKG_PI%.pi}.warf
+backgrnd ${BBKG_PI}
+
+# fitting range
+ignore bad
+ignore 0.0-0.4,8.0-**
+
+# plot related
+setplot energy
+
+method leven 10 0.01
+xsect bcmc
+cosmo 70 0 0.73
+xset delta 0.01
+systematic 0
+
+# model
+model apec + apec + wabs(powerlaw + apec)
+ 0.08 -0.01 0.008 0.008 64 64
+ 1 -0.001 0 0 5 5
+ 0 -0.01 -0.999 -0.999 10 10
+ 0.0 0.01 -1 0 0 1
+ 0.2 -0.01 0.008 0.008 64 64
+ 1 -0.001 0 0 5 5
+ 0 -0.01 -0.999 -0.999 10 10
+ 0.0 0.01 -1 0 0 1
+ ${N_H} -0.001 0 0 100000 1e+06
+ 1.4 -0.01 -3 -2 9 10
+ 0.0 0.01 -1 0 0 1
+ 1.0 0.01 0.008 0.008 64 64
+ 0.4 0.001 0 0 5 5
+ ${REDSHIFT} -0.01 -0.999 -0.999 10 10
+ 0.0 0.01 0 0 1e+24 1e+24
+
+freeze 1 2 3
+freeze 5 6 7
+freeze 9 10 14
+thaw 12 13
+_EOF_
+ ## XSPEC script }}}
+
+done # end 'for', `specextract'
+## main part }}}
+
+# clean
+printf "clean ...\n"
+rm -f ${REG_TMP}
+
+printf "DONE\n"
+###########################################################
+
+# vim: set ts=8 sw=4 tw=0 fenc=utf-8 ft=sh: #
diff --git a/scripts/ciao_blanksky_v4.sh b/scripts/ciao_blanksky_v4.sh
new file mode 100755
index 0000000..da1d6f6
--- /dev/null
+++ b/scripts/ciao_blanksky_v4.sh
@@ -0,0 +1,242 @@
+#!/bin/sh
+#
+unalias -a
+export LC_COLLATE=C
+###########################################################
+## process `blanksky' bkg files for spectra analysis ##
+## http://cxc.harvard.edu/ciao/threads/acisbackground/ ##
+## ##
+## for extracting the spectrum of the background ##
+## for determining the background components ##
+## ##
+## inputs: `evt2_clean', `asol files' ##
+## output: `blanksky_c7.fits' for ACIS-S, ##
+## `blanksky-c0-3.fits' for ACIS-I ##
+## ##
+## LIweitiaNux, January 11, 2011 ##
+###########################################################
+
+###########################################################
+## ChangeLogs:
+## v2: 2012/08/01
+## add ACIS-I support (chips 0-3)
+## v3: 2012/08/06, Junhua Gu
+## pass parameters by cmd line param
+## v4: 2012/08/13, LIweitiaNux
+## add `clobber=yes' parameters to CIAO tools
+##    improve `commandline arguments'
+## add `default parameters'
+###########################################################
+
+## about, used in `usage' {{{
+VERSION="v4"
+UPDATE="2012-08-14"
+## about }}}
+
+## usage, help {{{
+case "$1" in
+ -[hH]*|--[hH]*)
+ printf "usage:\n"
+ printf " `basename $0` evt=<evt2_clean> basedir=<base_dir> [ outfile=<outfile_name> ]\n"
+ printf "\nversion:\n"
+ printf "${VERSION}, ${UPDATE}\n"
+ exit ${ERR_USG}
+ ;;
+esac
+## usage, help }}}
+
+## default parameters {{{
+# default `event file' which used to match `blanksky' files
+#DFT_EVT="_NOT_EXIST_"
+DFT_EVT="`ls evt2*_clean.fits`"
+# default dir which contains `asols, asol.lis, ...' files
+# DFT_BASEDIR="_NOT_EXIST_"
+DFT_BASEDIR=".."
+
+## howto find files in `basedir'
+# default `asol.lis pattern'
+DFT_ASOLIS_PAT="acis*asol?.lis"
+## default parameters }}}
+
+## error code {{{
+ERR_USG=1
+ERR_DIR=11
+ERR_EVT=12
+ERR_BKG=13
+ERR_REG=14
+ERR_ASOL=21
+ERR_BPIX=22
+ERR_PBK=23
+ERR_MSK=24
+ERR_BKGTY=31
+ERR_SPEC=32
+## error code }}}
+
+## functions {{{
+# process commandline arguments
+# cmdline arg format: `KEY=VALUE'
+getopt_keyval() {
+ until [ -z "$1" ]
+ do
+ key=${1%%=*} # extract key
+ val=${1#*=} # extract value
+ keyval="${key}=\"${val}\""
+ echo "## getopt: eval '${keyval}'"
+ eval ${keyval}
+ shift # shift, process next one
+ done
+}
+
+# reprocess blanksky evt with matched gainfile
+blank_regain() {
+ mv $1 ${1%.fits}_ungain.fits
+ punlearn acis_process_events
+ acis_process_events infile="${1%.fits}_ungain.fits" \
+ outfile="$1" \
+ acaofffile=NONE stop="none" doevtgrade=no \
+ apply_cti=yes apply_tgain=no \
+ calculate_pi=yes pix_adj=NONE \
+ gainfile="$2" \
+ eventdef="{s:ccd_id,s:node_id,i:expno,s:chip,s:tdet,f:det,f:sky,s:phas,l:pha,l:pha_ro,f:energy,l:pi,s:fltgrade,s:grade,x:status}" \
+ clobber=yes
+ rm -fv ${1%.fits}_ungain.fits
+}
+## functions end }}}
+
+## parameters {{{
+# process cmdline args using `getopt_keyval'
+getopt_keyval "$@"
+
+# check given parameters
+# check evt file
+if [ -r "${evt}" ]; then
+ EVT=${evt}
+elif [ -r "${DFT_EVT}" ]; then
+ EVT=${DFT_EVT}
+else
+ read -p "evt2 file: " EVT
+ if ! [ -r "${EVT}" ]; then
+ printf "ERROR: cannot access given \`${EVT}' evt file\n"
+ exit ${ERR_EVT}
+ fi
+fi
+printf "## use evt file: \`${EVT}'\n"
+# check given dir
+if [ -d "${basedir}" ]; then
+ BASEDIR=${basedir}
+elif [ -d "${DFT_BASEDIR}" ]; then
+ BASEDIR=${DFT_BASEDIR}
+else
+ read -p "basedir (contains asol files): " BASEDIR
+ if [ ! -d ${BASEDIR} ]; then
+ printf "ERROR: given \`${BASEDIR}' NOT a directory\n"
+ exit ${ERR_DIR}
+ fi
+fi
+# remove the trailing '/'
+BASEDIR=`echo ${BASEDIR} | sed 's/\/*$//'`
+printf "## use basedir: \`${BASEDIR}'\n"
+## parameters }}}
+
+## check files in `basedir' {{{
+# check asol files
+ASOLIS=`ls -1 ${BASEDIR}/${DFT_ASOLIS_PAT} | head -n 1`
+if [ -z ${ASOLIS} ]; then
+ printf "ERROR: cannot find \"${DFT_ASOLIS_PAT}\" in dir \`${BASEDIR}'\n"
+ exit ${ERR_ASOL}
+fi
+printf "## use asolis: \`${ASOLIS}'\n"
+## check files }}}
+#exit 0 # test script
+
+#### main start {{{
+printf "look up coresponding background file ...\n"
+BKG_LKP="`acis_bkgrnd_lookup ${EVT}`"
+AS_NUM=`echo ${BKG_LKP} | tr ' ' '\n' | \grep 'acis7sD' | wc -l`
+AI_NUM=`echo ${BKG_LKP} | tr ' ' '\n' | \grep 'acis[0123]iD' | wc -l`
+## determine detector type: ACIS-S / ACIS-I {{{
+if [ ${AS_NUM} -eq 1 ]; then
+ printf "## ACIS-S, chip: 7\n"
+ BKG_ROOT="blanksky_c7"
+ cp -v ${BKG_LKP} ${BKG_ROOT}_orig.fits
+elif [ ${AI_NUM} -eq 4 ]; then
+ printf "## ACIS-I, chip: 0-3\n"
+ BKG_ROOT="blanksky_c0-3"
+ AI_FILES=""
+ for bf in ${BKG_LKP}; do
+ cp -v ${bf} .
+ AI_FILES="${AI_FILES},`basename ${bf}`"
+ done
+ AI_FILES=${AI_FILES#,} # remove the first ','
+ printf "## ACIS-I blanksky files to merge:\n"
+ printf "## \`${AI_FILES}'\n"
+ printf "\`dmmerge' to merge the above blanksky files ...\n"
+ # merge 4 chips blanksky evt files
+ punlearn dmmerge
+ dmmerge "${AI_FILES}" ${BKG_ROOT}_orig.fits clobber=yes
+ rm -fv `echo ${AI_FILES} | tr ',' ' '` # remove original files
+else
+ printf "## ERROR: UNKNOW blanksky files:\n"
+ printf "## ${BKG_ORIG}\n"
+ exit ${ERR_BKG}
+fi
+## determine ACIS type }}}
+
+## check 'DATAMODE' {{{
+## filter blanksky files (status=0) for `VFAINT' observations
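+## NOTE: in VFAINT mode the extra background cleaning flags likely-particle
+## events with non-zero STATUS bits, so `status=0' presumably keeps only the
+## cleaned events, matching a VFAINT-cleaned evt2 file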
+DATA_MODE="`dmkeypar ${EVT} DATAMODE echo=yes`"
+printf "## DATAMODE: ${DATA_MODE}\n"
+if [ "${DATA_MODE}" = "VFAINT" ]; then
+ mv -fv ${BKG_ROOT}_orig.fits ${BKG_ROOT}_tmp.fits
+ printf "apply \`status=0' to filter blanksky file ...\n"
+ punlearn dmcopy
+ dmcopy "${BKG_ROOT}_tmp.fits[status=0]" ${BKG_ROOT}_orig.fits clobber=yes
+ rm -fv ${BKG_ROOT}_tmp.fits
+fi
+## DATAMODE, status=0 }}}
+
+## check `GAINFILE' of blanksky and evt2 file {{{
+## if NOT match, then reprocess blanksky
+GAINFILE_EVT="`dmkeypar ${EVT} GAINFILE echo=yes`"
+GAINFILE_BG="`dmkeypar "${BKG_ROOT}_orig.fits" GAINFILE echo=yes`"
+if ! [ "${GAINFILE_EVT}" = "${GAINFILE_BG}" ]; then
+ printf "WARNING: GAINFILE NOT match.\n"
+ printf "event: ${GAINFILE_EVT}\n"
+ printf "blank: ${GAINFILE_BG}\n"
+ printf "reprocess blanksky with evt gainfile ...\n"
+ # reprocess blanksky using matched evt GAINFILE
+ GAINFILE="$CALDB/data/chandra/acis/det_gain/`basename ${GAINFILE_EVT}`"
+ printf "GAINFILE: ${GAINFILE}\n"
+ blank_regain "${BKG_ROOT}_orig.fits" ${GAINFILE}
+fi
+## check & match GAINFILE }}}
+
+printf "add the PNT header keywords ... "
+EVT_HEADER="_evt_header.par"
+EVT_PNT="_evt_pnt.par"
+punlearn dmmakepar
+dmmakepar ${EVT} ${EVT_HEADER} clobber=yes
+grep -i '_pnt' ${EVT_HEADER} > ${EVT_PNT}
+punlearn dmreadpar
+dmreadpar ${EVT_PNT} "${BKG_ROOT}_orig.fits[EVENTS]" clobber=yes
+printf "DONE\n"
+
+printf "reproject the background ...\n"
+punlearn reproject_events
+reproject_events infile=${BKG_ROOT}_orig.fits \
+ outfile=${BKG_ROOT}.fits match=${EVT} \
+ aspect="@${ASOLIS}" random=0 clobber=yes
+
+# rename output file if specified
+if ! [ -z "${outfile}" ]; then
+ mv -fv ${BKG_ROOT}.fits ${outfile}
+fi
+## main end }}}
+
+# clean
+printf "\nclean ...\n"
+rm -fv ${BKG_ROOT}_orig.fits ${EVT_PNT}
+
+printf "\nFINISHED\n"
+
+# vim: set ts=8 sw=4 tw=0 fenc=utf-8 ft=sh #
diff --git a/scripts/ciao_calc_csb.sh b/scripts/ciao_calc_csb.sh
new file mode 100755
index 0000000..92741b1
--- /dev/null
+++ b/scripts/ciao_calc_csb.sh
@@ -0,0 +1,235 @@
+#!/bin/sh
+#
+# for 'z>0.3' or 'counts_in_0.048R500<500'
+# execute this script in dir 'spc/profile'
+#
+# original filename: 'proxy_calc.sh', by Zhu Zhenhao
+# modified by: Weitian LI
+#
+# ChangeLog:
+# 2014/06/18: added answer for WR (warn region)
+#
+
+## error code {{{
+ERR_USG=1
+ERR_CALC=11
+ERR_DIR=12
+ERR_JSON=13
+ERR_EXPMAP=14
+ERR_EVTE=15
+ERR_Z=21
+ERR_CNT=22
+## }}}
+
+## cosmology calculator {{{
+## write the path of the cosmology calculator here
+BASE_PATH=`dirname $0`
+COSMO_CALC=`which cosmo_calc`
+if [ -z "${COSMO_CALC}" ] || [ ! -x ${COSMO_CALC} ] ; then
+ printf "ERROR: ${COSMO_CALC} neither executable nor specified\n"
+ exit ${ERR_CALC}
+fi
+## }}}
+
+## usage, help {{{
+case "$1" in
+ -[hH]*|--[hH]*)
+ printf "usage:\n"
+ printf " `basename $0` evt_e=<evt_e_name> expmap=<expmap_name> basedir=<base_dir> imgdir=<img_dir> json=<json_name>\n"
+ printf "NOTE: exec this script in dir 'spc/profile'\n"
+ exit ${ERR_USG}
+ ;;
+esac
+## usage, help }}}
+
+## default parameters {{{
+# default basedir relative to 'spc/profile'
+DFT_BASEDIR="../.."
+# default imgdir relative to 'basedir'
+DFT_IMGDIR="img"
+# default expmap pattern
+DFT_EXPMAP_PAT="expmap_c*.fits"
+# default evt_e pattern
+DFT_EVTE_PAT="evt2_c*_e700-7000.fits"
+# default json file pattern
+DFT_JSON_PAT="*_INFO.json"
+#
+RSPEC_REG="rspec.reg"
+CSB_RES="csb_results.txt"
+#
+INIT_DIR=`pwd -P`
+## }}}
+
+## functions {{{
+# process commandline arguments
+# cmdline arg format: `KEY=VALUE'
+getopt_keyval() {
+ until [ -z "$1" ]
+ do
+ key=${1%%=*} # extract key
+ val=${1#*=} # extract value
+ keyval="${key}=\"${val}\""
+ echo "## getopt: eval '${keyval}'"
+ eval ${keyval}
+ shift # shift, process next one
+ done
+}
+## functions }}}
+
+## parameters {{{
+# process cmdline args using `getopt_keyval'
+getopt_keyval "$@"
+
+# basedir
+if [ -d "${basedir}" ] && ls ${basedir}/*repro_evt2.fits > /dev/null 2>&1; then
+ BASEDIR=${basedir}
+elif [ -d "${DFT_BASEDIR}" ] && ls ${DFT_BASEDIR}/*repro_evt2.fits > /dev/null 2>&1; then
+ BASEDIR=${DFT_BASEDIR}
+else
+ read -p "> basedir (contains info json): " BASEDIR
+ if [ ! -d "${BASEDIR}" ] || ! ls ${BASEDIR}/*repro_evt2.fits >/dev/null 2>&1; then
+ printf "ERROR: given \`${BASEDIR}' invalid!\n"
+ exit ${ERR_DIR}
+ fi
+fi
+BASEDIR=`( cd ${BASEDIR} && pwd -P )`
+printf "## use basedir: \`${BASEDIR}'\n"
+# img dir
+if [ ! -z "${imgdir}" ] && [ -d "${BASEDIR}/${imgdir}" ]; then
+ IMG_DIR=`( cd ${BASEDIR}/${imgdir} && pwd -P )`
+elif [ -d "${BASEDIR}/${DFT_IMGDIR}" ]; then
+ IMG_DIR=`( cd ${BASEDIR}/${DFT_IMGDIR} && pwd -P )`
+else
+ read -p "> img dir (relative to basedir): " IMG_DIR
+ if [ ! -d "${BASEDIR}/${IMG_DIR}" ]; then
+ printf "ERROR: given \`${IMG_DIR}' invalid\n"
+ exit ${ERR_DIR}
+ else
+ IMG_DIR="${BASEDIR}/${IMG_DIR}"
+ fi
+fi
+printf "## use imgdir: \`${IMG_DIR}'\n"
+# info json
+if [ ! -z "${json}" ] && [ -r "${BASEDIR}/${json}" ]; then
+ JSON_FILE="${BASEDIR}/${json}"
+elif [ `ls -1 ${BASEDIR}/${DFT_JSON_PAT} 2>/dev/null | wc -l` -eq 1 ]; then
+ JSON_FILE="`ls ${BASEDIR}/${DFT_JSON_PAT} 2>/dev/null`"
+else
+ read -p "> info json: " JSON_FILE
+ if [ ! -r "${BASEDIR}/${JSON_FILE}" ]; then
+ printf "ERROR: given \`${JSON_FILE}' not exist!\n"
+ exit ${ERR_JSON}
+ fi
+fi
+printf "## use json_file: \`${JSON_FILE}'\n"
+# expmap
+if [ ! -z "${expmap}" ] && [ -r "${IMG_DIR}/${expmap}" ]; then
+ EXPMAP="${expmap}"
+elif [ `ls -1 ${IMG_DIR}/${DFT_EXPMAP_PAT} 2>/dev/null | wc -l` -eq 1 ]; then
+ EXPMAP="`( cd ${IMG_DIR} && ls ${DFT_EXPMAP_PAT} 2>/dev/null )`"
+else
+ read -p "> expmap filename: " EXPMAP
+ if [ ! -r "${IMG_DIR}/${EXPMAP}" ]; then
+ printf "ERROR: given \`${EXPMAP}' not exist!\n"
+ exit ${ERR_EXPMAP}
+ fi
+fi
+printf "## use expmap: \`${EXPMAP}'\n"
+# evt_e
+if [ ! -z "${evt_e}" ] && [ -r "${IMG_DIR}/${evt_e}" ]; then
+ EVT_E="${evt_e}"
+elif [ `ls -1 ${IMG_DIR}/${DFT_EVTE_PAT} 2>/dev/null | wc -l` -eq 1 ]; then
+ EVT_E="`( cd ${IMG_DIR} && ls ${DFT_EVTE_PAT} 2>/dev/null )`"
+else
+ read -p "> evt_e filename: " EVT_E
+ if [ ! -r "${IMG_DIR}/${EVT_E}" ]; then
+ printf "ERROR: given \`${EVT_E}' not exist!\n"
+ exit ${ERR_EVTE}
+ fi
+fi
+printf "## use evt_e: \`${EVT_E}'\n"
+## }}}
+
+## main {{{
+# in 'spc/profile'
+X=`grep -iE '(pie|annulus)' ${RSPEC_REG} | head -n 1 | awk -F'(' '{ print $2 }' | awk -F',' '{ print $1 }'`
+Y=`grep -iE '(pie|annulus)' ${RSPEC_REG} | head -n 1 | awk -F'(' '{ print $2 }' | awk -F',' '{ print $2 }'`
+# json file
+Z=`grep -i '"redshift"' ${JSON_FILE} | awk -F':' '{ print $2 }' | tr -d ' ,'`
+R500=`grep '"R500.*kpc' ${JSON_FILE} | awk -F':' '{ print $2 }' | tr -d ' ,'`
+OBS_ID=`grep '"Obs.*ID' ${JSON_FILE} | awk -F':' '{ print $2 }' | tr -d ' ,'`
+OBJ_NAME=`grep '"Source\ Name' ${JSON_FILE} | awk -F':' '{ print $2 }' | sed -e 's/\ *"//' -e 's/"\ *,$//'`
+CT=`grep '"Cooling_time' ${JSON_FILE} | awk -F':' '{ print $2 }' | tr -d ' ,'`
+
+cd ${IMG_DIR}
+printf "entered img directory\n"
+
+### test Z>0.3?
+if [ `echo "${Z} < 0.3" | bc -l` -eq 1 ]; then
+ F_WZ=true
+ WZ="WZ"
+ printf "*** WARNING: redshift z=${Z} < 0.3 ***\n"
+# exit ${ERR_Z}
+fi
+
+KPC_PER_PIXEL=`${COSMO_CALC} ${Z} | grep 'kpc/pixel' | awk '{ print $3 }'`
+RC_PIX=`echo "scale=2; 0.048 * ${R500} / ${KPC_PER_PIXEL}" | bc -l`
+# test counts_in_0.048R500<500?
+RC_REG="pie(${X},${Y},0,${RC_PIX},0,360)"
+punlearn dmlist
+CNT_RC=`dmlist infile="${EVT_E}[sky=${RC_REG}]" opt=block | grep 'EVENTS' | awk '{ print $8 }'`
+printf "R500=${R500}, 0.048R500_pix=${RC_PIX}, counts_in_0.048R500=${CNT_RC}\n"
+if [ ${CNT_RC} -gt 500 ]; then
+ F_WC=true
+ WC="WC"
+ printf "*** WARNING: counts_in_0.048R500=${CNT_RC} > 500 ***\n"
+# exit ${ERR_CNT}
+fi
+
+TMP_REG="_tmp_csb.reg"
+TMP_S="_tmp_csb.fits"
+
+R1=`echo "scale=2; 40 / ${KPC_PER_PIXEL}" | bc -l`
+R2=`echo "scale=2; 400 / ${KPC_PER_PIXEL}" | bc -l`
+cat > ${TMP_REG} << _EOF_
+pie(${X},${Y},0,${R1},0,360)
+pie(${X},${Y},0,${R2},0,360)
+_EOF_
+
+printf "CHECK the regions (R1=${R1}, R2=${R2}) ...\n"
+ds9 ${EVT_E} -regions ${TMP_REG} -cmap sls -bin factor 4
+read -p "> Whether the region exceeds ccd edge?(y/N) " F_WR
+case "${F_WR}" in
+ [yY]*)
+ WR="WR"
+ ;;
+ *)
+ WR=""
+ ;;
+esac
+
+punlearn dmextract
+dmextract infile="${EVT_E}[bin sky=@${TMP_REG}]" outfile="${TMP_S}" exp=${EXPMAP} opt=generic clobber=yes
+punlearn dmlist
+S1=`dmlist "${TMP_S}[cols SUR_FLUX]" opt="data,clean" | grep -v '#' | sed -n -e 's/\ *//' -e '1p'`
+S2=`dmlist "${TMP_S}[cols SUR_FLUX]" opt="data,clean" | grep -v '#' | sed -n -e 's/\ *//' -e '2p'`
+CSB=`echo "${S1} ${S2}" | awk '{ print $1/$2/100 }'`
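+# NOTE: S1 and S2 are SUR_FLUX values, i.e. already divided by the region
+# area; dividing their ratio by (R2/R1)^2 = 100 converts it back to a ratio
+# of enclosed fluxes, so CSB should equal flux(<40 kpc) / flux(<400 kpc)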
+
+## back to original spc/profile directory
+cd ${INIT_DIR}
+
+[ -e ${CSB_RES} ] && mv -f ${CSB_RES} ${CSB_RES}_bak
+printf "\n==============================\n"
+printf "z=${Z}, R500=${R500} (kpc)\n" | tee -a ${CSB_RES}
+printf "0.048R500=${RC_PIX}, counts=${CNT_RC}\n" | tee -a ${CSB_RES}
+printf "R1=${R1}, R2=${R2} (pixel)\n" | tee -a ${CSB_RES}
+printf "S1=${S1}, S2=${S2} (sur_flux)\n" | tee -a ${CSB_RES}
+printf "C_sb: ${CSB}\n" | tee -a ${CSB_RES}
+[ "x${F_WZ}" = "xtrue" ] && printf "${WZ}\n" | tee -a ${CSB_RES}
+[ "x${F_WC}" = "xtrue" ] && printf "${WC}\n" | tee -a ${CSB_RES}
+printf "# OBS_ID,OBJ_NAME,Z,R500,RC_PIX,CNT_RC,CT,R1_PIX,R2_PIX,S1,S2,CSB,WZ,WC,WR\n" | tee -a ${CSB_RES}
+printf "# $OBS_ID,$OBJ_NAME,$Z,$R500,$RC_PIX,$CNT_RC,$CT,$R1,$R2,$S1,$S2,$CSB,$WZ,$WC,$WR\n\n" | tee -a ${CSB_RES}
+## main }}}
+
+exit 0
+
diff --git a/scripts/ciao_calc_ct.sh b/scripts/ciao_calc_ct.sh
new file mode 100755
index 0000000..6fb1d79
--- /dev/null
+++ b/scripts/ciao_calc_ct.sh
@@ -0,0 +1,593 @@
+#!/bin/sh
+#
+unalias -a
+export LC_COLLATE=C
+###########################################################
+## based on `ciao_r500avgt' ##
+## for calculating the `cooling time ' ##
+## within (0.-0.048 r500) region ##
+## ##
+## Junhua Gu ##
+## August 22, 2012 ##
+## ##
+## LIweitiaNux ##
+## 2013/04/28 ##
+###########################################################
+
+###########################################################
+## ChangeLogs
+## 2014/06/18: 'cooling_time2.sh' -> 'ciao_calc_ct.sh'
+###########################################################
+
+## cosmology calculator {{{
+## XXX: MODIFY THIS TO YOUR OWN CASE
+## and make sure this `calc' is executable
+## NOTES: use `$HOME' instead of `~' in path
+BASE_PATH=`dirname $0`
+COSCALC="`which cosmo_calc calc_distance | head -n 1`"
+if [ -z "${COSCALC}" ] || [ ! -x ${COSCALC} ]; then
+ printf "ERROR: \`COSCALC: ${COSCALC}' neither specified nor executable\n"
+ exit 255
+fi
+## }}}
+
+## about, used in `usage' {{{
+VERSION="v1.1"
+UPDATE="2012-08-26"
+## about }}}
+
+## error code {{{
+ERR_USG=1
+ERR_DIR=11
+ERR_EVT=12
+ERR_BKG=13
+ERR_REG=14
+ERR_ASOL=21
+ERR_BPIX=22
+ERR_PBK=23
+ERR_MSK=24
+ERR_BKGTY=31
+ERR_SPEC=32
+ERR_ARF=51
+ERR_RMF=52
+ERR_UNI=61
+ERR_JSON=62    # info json file not accessible (referenced below)
+## error code }}}
+
+## usage, help {{{
+case "$1" in
+ -[hH]*|--[hH]*)
+ printf "usage:\n"
+ printf " `basename $0` evt=<evt2_clean> r500=<r500_kpc> regin=<input_reg> regout=<output_reg> bkgd=<blank_evt | lbkg_reg | bkg_spec> nh=<nH> z=<redshift> arf=<warf_file> rmf=<wrmf_file> [ grpcmd=<grppha_cmd> log=<log_file> ]\n"
+ printf "\nversion:\n"
+ printf "${VERSION}, ${UPDATE}\n"
+ exit ${ERR_USG}
+ ;;
+esac
+## usage, help }}}
+
+## default parameters {{{
+# default `event file' which used to match `blanksky' files
+#DFT_EVT="_NOT_EXIST_"
+DFT_EVT="`ls evt2*_clean.fits 2> /dev/null`"
+# default `bkgd', use `bkgcorr_blanksky*' corrected bkg spectrum
+DFT_BKGD="`ls bkgcorr_blanksky_*.pi 2> /dev/null`"
+# default basedir
+DFT_BASEDIR="../.."
+# default info_json pattern
+DFT_JSON_PAT="*_INFO.json"
+# default `radial region file'
+#DFT_REG_IN="_NOT_EXIST_"
+DFT_REG_IN="rspec.reg"
+# default output region file (0.1-0.5 r500 region)
+DFT_REG_OUT="cooling.reg"
+# default ARF/RMF, the one of the outmost region
+DFT_ARF="`ls r1_*.warf 2> /dev/null`"
+DFT_RMF="`ls r1_*.wrmf 2> /dev/null`"
+
+# default `group command' for `grppha'
+#DFT_GRP_CMD="group 1 128 2 129 256 4 257 512 8 513 1024 16"
+DFT_GRP_CMD="group min 20"
+# default `log file'
+DFT_LOGFILE="cooling_`date '+%Y%m%d'`.log"
+
+INNER=0.0
+OUTER=0.048
+CT_RES="cooling_results.txt"
+# default output xspec scripts
+XSPEC_SCRIPT="xspec_cooling.xcm"
+# deproj xspec script, generated by `deproj_spectra'
+# from which get `nh' and `redshift'
+XSPEC_DEPROJ="xspec_deproj.xcm"
+## default parameters }}}
+
+## functions {{{
+# process commandline arguments
+# cmdline arg format: `KEY=VALUE'
+getopt_keyval() {
+ until [ -z "$1" ]
+ do
+ key=${1%%=*} # extract key
+ val=${1#*=} # extract value
+ keyval="${key}=\"${val}\""
+ echo "## getopt: eval '${keyval}'"
+ eval ${keyval}
+ shift # shift, process next one
+ done
+}
+
+## background renormalization (BACKSCAL) {{{
+# renorm background according to particle background
+# energy range: 9.5-12.0 keV (channel: 651-822)
+CH_LOW=651
+CH_HI=822
+pb_flux() {
+ punlearn dmstat
+ COUNTS=`dmstat "$1[channel=${CH_LOW}:${CH_HI}][cols COUNTS]" | grep -i 'sum:' | awk '{ print $2 }'`
+ punlearn dmkeypar
+ EXPTIME=`dmkeypar $1 EXPOSURE echo=yes`
+ BACK=`dmkeypar $1 BACKSCAL echo=yes`
+ # fix `scientific notation' bug for `bc'
+ EXPTIME_B=`echo ${EXPTIME} | sed 's/[eE]/\*10\^/' | sed 's/+//'`
+ BACK_B=`echo "( ${BACK} )" | sed 's/[eE]/\*10\^/' | sed 's/+//'`
+ PB_FLUX=`echo "scale = 16; ${COUNTS} / ${EXPTIME_B} / ${BACK_B}" | bc -l`
+ echo ${PB_FLUX}
+}
+
+bkg_renorm() {
+ # $1: src spectrum, $2: back spectrum
+ PBFLUX_SRC=`pb_flux $1`
+ PBFLUX_BKG=`pb_flux $2`
+ BACK_OLD=`dmkeypar $2 BACKSCAL echo=yes`
+ BACK_OLD_B=`echo "( ${BACK_OLD} )" | sed 's/[eE]/\*10\^/' | sed 's/+//'`
+ BACK_NEW=`echo "scale = 16; ${BACK_OLD_B} * ${PBFLUX_BKG} / ${PBFLUX_SRC}" | bc -l`
+ printf "\`$2': BACKSCAL:\n"
+ printf " ${BACK_OLD} --> ${BACK_NEW}\n"
+ punlearn dmhedit
+ dmhedit infile=$2 filelist=none operation=add \
+ key=BACKSCAL value=${BACK_NEW} comment="old value: ${BACK_OLD}"
+}
+## bkg renorm }}}
+## functions end }}}
+
+## parameters {{{
+# process cmdline args using `getopt_keyval'
+getopt_keyval "$@"
+
+## check log parameters {{{
+if [ ! -z "${log}" ]; then
+ LOGFILE="${log}"
+else
+ LOGFILE=${DFT_LOGFILE}
+fi
+printf "## use logfile: \`${LOGFILE}'\n"
+[ -e "${LOGFILE}" ] && mv -fv ${LOGFILE} ${LOGFILE}_bak
+TOLOG="tee -a ${LOGFILE}"
+echo "process script: `basename $0`" >> ${LOGFILE}
+echo "process date: `date`" >> ${LOGFILE}
+## log }}}
+
+# check given parameters
+# process `nh' and `redshift' {{{
+if [ ! -r "${XSPEC_DEPROJ}" ]; then
+ printf "## the script \`${XSPEC_DEPROJ}' generated by \`deproj_spectra' NOT found\n" | ${TOLOG}
+ printf "## please supply the value of \`nh' and \`redshift' manully\n" | ${TOLOG}
+ read -p "> value of nh: " N_H
+ read -p "> value of redshift: " REDSHIFT
+else
+ # get `nh' and `redshift' from xspec script
+ LN=`grep -n 'projct\*wabs\*apec' ${XSPEC_DEPROJ} | tail -n 1 | cut -d':' -f1`
+ # calc the line number of which contains `nh'
+ LN_NH=`expr ${LN} + 4`
+ NH_XCM=`head -n ${LN_NH} ${XSPEC_DEPROJ} | tail -n 1 | awk '{ print $1 }'`
+ # calc the line number of `redshift'
+ LN_RS=`expr ${LN} + 7`
+ RS_XCM=`head -n ${LN_RS} ${XSPEC_DEPROJ} | tail -n 1 | awk '{ print $1 }'`
+ printf "## get value of nh: \`${NH_XCM}' (from \`${XSPEC_DEPROJ}')\n" | ${TOLOG}
+ printf "## get value of redshift: \`${RS_XCM}' (from \`${XSPEC_DEPROJ}')\n" | ${TOLOG}
+
+ ## if `nh' and `redshift' supplied in cmdline, then use them
+ if [ ! -z "${nh}" ]; then
+ N_H=${nh}
+ else
+ N_H=${NH_XCM}
+ fi
+ # redshift
+ if [ ! -z "${z}" ]; then
+ REDSHIFT=${z}
+ else
+ REDSHIFT=${RS_XCM}
+ fi
+fi
+printf "## use nH: ${N_H}\n" | ${TOLOG}
+printf "## use redshift: ${REDSHIFT}\n" | ${TOLOG}
+# nh & redshift }}}
+
+# check basedir & json file
+if [ -d "${basedir}" ]; then
+ BASEDIR=${basedir}
+else
+ BASEDIR=${DFT_BASEDIR}
+fi
+if [ ! -z "${json}" ] && [ -r "${BASEDIR}/${json}" ]; then
+ JSON_FILE="${BASEDIR}/${json}"
+elif [ `ls ${BASEDIR}/${DFT_JSON_PAT} 2> /dev/null | wc -l` -eq 1 ]; then
+ JSON_FILE=`ls ${BASEDIR}/${DFT_JSON_PAT}`
+else
+ read -p "> JSON_file: " JSON_FILE
+ if [ ! -r "${JSON_FILE}" ]; then
+ printf "ERROR: cannot access given \`${JSON_FILE}'\n"
+ exit ${ERR_JSON}
+ fi
+fi
+printf "## use json_file: \`${JSON_FILE}'\n" | ${TOLOG}
+
+# process `r500' {{{
+R500_RAW=`grep '"R500.*kpc' ${JSON_FILE} | sed 's/.*"R500.*":\ //' | sed 's/\ *,$//'`
+if [ ! -z "${r500}" ]; then
+ R500_RAW=${r500}
+fi
+if [ -z "${R500_RAW}" ]; then
+ printf "## input R500 followed with unit, e.g.: 800kpc, 400pix\n"
+ read -p "> value of \`R500' (in pixel/kpc): " R500_RAW
+fi
+R500_VAL=`echo "${R500_RAW}" | tr -d 'a-zA-Z, '`
+R500_UNI=`echo "${R500_RAW}" | tr -d '0-9, '`
+printf "## get \`R500': ${R500_VAL} in unit \`${R500_UNI}'\n" | ${TOLOG}
+
+# if in kpc, convert to pix
+case "${R500_UNI}" in
+ [pP]*)
+ printf "## units in \`pixel', conversion not needed\n" | ${TOLOG}
+ R500_PIX_B=`echo ${R500_VAL} | sed 's/[eE]/\*10\^/' | sed 's/+//'`
+ ;;
+ *)
+ printf "## units in \`kpc', convert to \`Chandra pixel'\n" | ${TOLOG}
+ KPC_PER_PIX=`${COSCALC} ${REDSHIFT} | grep 'kpc.*pix' | tr -d 'a-zA-Z_#=(),:/ '`
+ # convert scientific notation for `bc'
+ KPC_PER_PIX_B=`echo ${KPC_PER_PIX} | sed 's/[eE]/\*10\^/' | sed 's/+//'`
+ printf "## calculated \`kpc/pixel': ${KPC_PER_PIX_B}\n"
+ R500_VAL_B=`echo ${R500_VAL} | sed 's/[eE]/\*10\^/' | sed 's/+//'`
+ R500_PIX_B=`echo "scale = 4; ${R500_VAL_B} / ( ${KPC_PER_PIX_B} )" | bc -l`
+ ;;
+esac
+# calc (inner-outer R500)
+R_IN=`echo "scale = 4; ${INNER} * ${R500_PIX_B}" | bc -l`
+R_OUT=`echo "scale = 4; ${OUTER} * ${R500_PIX_B}" | bc -l`
+printf "## R500 in units pixel: ${R500_PIX_B}\n" | ${TOLOG}
+printf "## (${INNER}-${OUTER} R500) range in pixel: ${R_IN} - ${R_OUT}\n" | ${TOLOG}
+# r500 }}}
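+# NOTE: worked example with made-up numbers: for R500 = 1000 kpc and
+# kpc/pixel = 4.0 at the given redshift, R500_PIX = 1000 / 4.0 = 250,
+# so the extraction region spans 0 - 0.048*250 = 12 pixel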
+
+# check evt file
+if [ -r "${evt}" ]; then
+ EVT=${evt}
+elif [ -r "${DFT_EVT}" ]; then
+ EVT=${DFT_EVT}
+else
+ read -p "> clean evt2 file: " EVT
+ if [ ! -r "${EVT}" ]; then
+ printf "ERROR: cannot access given \`${EVT}' evt file\n"
+ exit ${ERR_EVT}
+ fi
+fi
+printf "## use evt file: \`${EVT}'\n" | ${TOLOG}
+
+# input and output region files {{{
+if [ -r "${regin}" ]; then
+ REG_IN="${regin}"
+elif [ -r "${DFT_REG_IN}" ]; then
+ REG_IN=${DFT_REG_IN}
+else
+ read -p "> previous used radial spec regfile: " REG_IN
+ if [ ! -r "${REG_IN}" ]; then
+ printf "ERROR: cannot access given \`${REG_IN}' region file\n"
+ exit ${ERR_REG}
+ fi
+fi
+printf "## use previous regfile: \`${REG_IN}'\n" | ${TOLOG}
+if [ ! -z "${regout}" ]; then
+ REG_OUT="${regout}"
+else
+ REG_OUT=${DFT_REG_OUT}
+fi
+[ -e "${REG_OUT}" ] && mv -fv ${REG_OUT} ${REG_OUT}_bak
+printf "## set output regfile (0.1-0.5 r500 region): \`${REG_OUT}'\n" | ${TOLOG}
+
+# get center position from `regin'
+# only consider `pie' or `annulus'-shaped region
+TMP_REG=`grep -iE '(pie|annulus)' ${REG_IN} | head -n 1`
+XC=`echo ${TMP_REG} | tr -d ' ' | awk -F'[(),]' '{ print $2 }'`
+YC=`echo ${TMP_REG} | tr -d ' ' | awk -F'[(),]' '{ print $3 }'`
+printf "## get center coord: (${XC},${YC})\n" | ${TOLOG}
+# region files }}}
+
+# check given bkgd, determine background {{{
+if [ -r "${bkgd}" ]; then
+ BKGD=${bkgd}
+elif [ -r "${DFT_BKGD}" ]; then
+ BKGD=${DFT_BKGD}
+else
+ read -p "> background (blanksky_evt | lbkg_reg | bkg_spec): " BKGD
+ if [ ! -r "${BKGD}" ]; then
+ printf "ERROR: cannot access given \`${BKGD}'\n"
+ exit ${ERR_BKG}
+ fi
+fi
+printf "## use bkgd: \`${BKGD}'\n" | ${TOLOG}
+# determine bkg type: blanksky, lbkg_reg, bkg_spec ?
+# according to file type first: text / FITS
+# if FITS, then get values of `HDUCLAS1' and `OBJECT'
+if file -bL ${BKGD} | grep -qi 'text'; then
+ printf "## given \`${BKGD}' is a \`text file'\n"
+ printf "## use it as local bkg region file\n"
+ printf "## use *LOCAL BKG SPEC*\n" | ${TOLOG}
+ # just set flags, extract spectrum later
+ USE_LBKG_REG=YES
+ USE_BLANKSKY=NO
+ USE_BKG_SPEC=NO
+elif file -bL ${BKGD} | grep -qi 'FITS'; then
+ printf "## given \`${BKGD}' is a \`FITS file'\n"
+ # get FITS header keyword
+ HDUCLAS1=`dmkeypar ${BKGD} HDUCLAS1 echo=yes`
+ if [ "${HDUCLAS1}" = "EVENTS" ]; then
+ # event file
+ printf "## given file is \`event'\n"
+ # check if `blanksky' or `stowed bkg'
+ BKG_OBJ=`dmkeypar ${BKGD} OBJECT echo=yes`
+ if [ "${BKG_OBJ}" = "BACKGROUND DATASET" ] || [ "${BKG_OBJ}" = "ACIS STOWED" ]; then
+ # valid bkg evt file
+ printf "## given FITS file is a valid bkgrnd file\n"
+ printf "## use *BLANKSKY*\n" | ${TOLOG}
+ USE_BLANKSKY=YES
+ USE_LBKG_REG=NO
+ USE_BKG_SPEC=NO
+ # specify `BLANKSKY'
+ BLANKSKY=${BKGD}
+ else
+ # invalid bkg evt file
+ printf "ERROR: invalid bkg evt file given\n"
+ exit ${ERR_BKGTY}
+ fi
+ elif [ "${HDUCLAS1}" = "SPECTRUM" ]; then
+ # spectrum file
+ printf "## given file is \`spectrum'\n"
+ printf "## use *BKG SPECTRUM*\n" | ${TOLOG}
+ USE_BKG_SPEC=YES
+ USE_BLANKSKY=NO
+ USE_LBKG_REG=NO
+ # specify `BKG_SPEC'
+ BKG_SPEC=${BKGD}
+ else
+ # other type
+ printf "ERROR: other type FITS given\n"
+ exit ${ERR_BKGTY}
+ fi
+else
+ printf "ERROR: given \`${BKGD}' type UNKNOWN\n"
+ exit ${ERR_BKGTY}
+fi
+# bkgd }}}
+
+# check `arf' and `rmf' {{{
+if [ -r "${arf}" ]; then
+ ARF=${arf}
+elif [ -r "${DFT_ARF}" ]; then
+ ARF=${DFT_ARF}
+else
+ read -p "> provide the ARF to use: " ARF
+ if [ ! -r "${ARF}" ]; then
+ printf "ERROR: cannot access given \`${ARF}'\n"
+ exit ${ERR_ARF}
+ fi
+fi
+printf "## use ARF: \`${ARF}'\n" | ${TOLOG}
+# rmf
+if [ -r "${rmf}" ]; then
+ RMF=${rmf}
+elif [ -r "${DFT_RMF}" ]; then
+ RMF=${DFT_RMF}
+else
+ read -p "> provide the RMF to use: " RMF
+ if [ ! -r "${RMF}" ]; then
+ printf "ERROR: cannot access given \`${RMF}'\n"
+ exit ${ERR_RMF}
+ fi
+fi
+printf "## use RMF: \`${RMF}'\n" | ${TOLOG}
+# arf & rmf }}}
+
+# check given `grpcmd'
+if [ ! -z "${grpcmd}" ]; then
+ GRP_CMD="${grpcmd}"
+else
+ GRP_CMD="${DFT_GRP_CMD}"
+fi
+printf "## use grppha cmd: \`${GRP_CMD}'\n" | ${TOLOG}
+## parameters }}}
+
+##################################################
+#### main
+## D_A
+#D_A_CM=`${COSCALC} ${REDSHIFT} | grep '^d_a_cm' | awk '{ print $2 }'`
+D_A_CM=`${COSCALC} ${REDSHIFT} | grep -i 'd_a.*cm' | awk -F'=' '{ print $2 }' | awk '{ print $1 }'`
+printf "D_A_CM(${REDSHIFT})= ${D_A_CM}\n"
+
+## region related {{{
+## generate the needed region file
+printf "generate the output region file ...\n"
+cat > ${REG_OUT} << _EOF_
+# Region file format: CIAO version 1.0
+pie(${XC},${YC},${R_IN},${R_OUT},0,360)
+_EOF_
+
+## open the evt file to verify or modify
+printf "## check the generated pie region ...\n"
+printf "## if modified, save with the same name \`${REG_OUT}' (overwrite)\n"
+ds9 ${EVT} -regions ${REG_OUT} -cmap sls -bin factor 4
+
+## check the (modified) region (pie region end angle)
+printf "check the above region (for pie region end angle) ...\n"
+INVALID=`grep -i 'pie' ${REG_OUT} | awk -F'[,()]' '$7 > 360'`
+if [ "x${INVALID}" != "x" ]; then
+ printf "*** WARNING: there are pie regions' END_ANGLE > 360\n" | ${TOLOG}
+ printf "*** will to fix ...\n"
+ mv -fv ${REG_OUT} ${REG_OUT}_tmp
+ # using `awk' to fix
+ awk -F'[,()]' '{
+ if ($7 > 360) {
+ printf "%s(%.2f,%.2f,%.2f,%.2f,%.2f,%.2f)\n", $1,$2,$3,$4,$5,$6,($7-360)
+ }
+ else {
+ print $0
+ }
+ }' ${REG_OUT}_tmp > ${REG_OUT}
+ rm -f ${REG_OUT}_tmp
+fi
+## region related }}}
+
+## generate spectrum {{{
+# check counts
+punlearn dmlist
+CNT_RC=`dmlist infile="${EVT}[sky=region(${REG_OUT})][energy=700:7000]" opt=block | grep 'EVENTS' | awk '{ print $8 }'`
+if [ ${CNT_RC} -lt 500 ]; then
+ F_WC=true
+ WC="LOW_COUNTS"
+ printf "*** WARNING: counts_in_0.048R500=${CNT_RC} < 500 ***\n"
+fi
+
+# object
+printf "extract object spectrum \`${AVGT_SPEC}' ...\n"
+AVGT_SPEC="${REG_OUT%.reg}.pi"
+AVGT_SPEC_GRP="${AVGT_SPEC%.pi}_grp.pi"
+punlearn dmextract
+dmextract infile="${EVT}[sky=region(${REG_OUT})][bin PI]" \
+ outfile="${AVGT_SPEC}" wmap="[bin det=8]" clobber=yes
+# group spectrum
+printf "group object spectrum ...\n"
+grppha infile="${AVGT_SPEC}" outfile="${AVGT_SPEC_GRP}" \
+ comm="${GRP_CMD} & exit" clobber=yes > /dev/null
+
+# background
+printf "generate the background spectrum ...\n"
+AVGT_BKG="${AVGT_SPEC%.pi}_bkg.pi"
+if [ "${USE_BLANKSKY}" = "YES" ]; then
+ # use blanksky as background file
+ printf "extract spectrum from blanksky ...\n"
+ punlearn dmextract
+ dmextract infile="${BLANKSKY}[sky=region(${REG_OUT})][bin PI]" \
+ outfile=${AVGT_BKG} wmap="[bin det=8]" clobber=yes
+elif [ "${USE_LBKG_REG}" = "YES" ]; then
+ printf "extract local background ...\n"
+ punlearn dmextract
+ dmextract infile="${EVT}[sky=region(${BKGD})][bin PI]" \
+ outfile=${AVGT_BKG} wmap="[bin det=8]" clobber=yes
+elif [ "${USE_BKG_SPEC}" = "YES" ]; then
+ printf "copy specified background spectrum ...\n"
+ cp -fv ${BKG_SPEC} ${AVGT_BKG}
+fi
+
+printf "renormalize the background ...\n"
+bkg_renorm ${AVGT_SPEC} ${AVGT_BKG}
+## spectrum }}}
+
+## generate XSPEC script {{{
+printf "generate a XSPEC script ...\n"
+[ -e "${XSPEC_SCRIPT}" ] && mv -fv ${XSPEC_SCRIPT} ${XSPEC_SCRIPT}_bak
+cat > ${XSPEC_SCRIPT} << _EOF_
+## XSPEC script
+## spectrum analysis to get the average temperature within (0-0.048 R500)
+##
+## generated by: \``basename $0`'
+## date: \``date`'
+##
+
+# xspec settings
+statistic chi
+abund grsa
+query yes
+
+# data
+data ${AVGT_SPEC_GRP}
+response ${RMF}
+arf ${ARF}
+backgrnd ${AVGT_BKG}
+
+# fitting range
+ignore bad
+ignore 0.0-0.7,7.0-**
+
+# plot related
+setplot energy
+
+method leven 10 0.01
+xsect bcmc
+cosmo 70 0 0.73
+xset delta 0.01
+systematic 0
+
+# model
+model wabs*apec
+ ${N_H} -0.001 0 0 100000 1e+06
+ 1.0 0.01 0.008 0.008 64 64
+ 0.4 0.001 0 0 5 5
+ ${REDSHIFT} -0.01 -0.999 -0.999 10 10
+ 0.0 0.01 0 0 1e+24 1e+24
+
+## xspec script end
+
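+# NOTE: the proc below estimates the cooling time roughly as
+#     t_cool = 3/2 * (n_e + n_H) * k_B * T * V / L_X
+# with n_e*n_H recovered from the apec normalization, V the volume of a
+# sphere of radius R_OUT (0.048 R500), n_e/n_H taken as 1.2, and
+# k_B*T in erg (1 keV = 1.602e-9 erg)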
+proc calc_cooling_time {} {
+ set rout ${R_OUT}
+ set d_a_cm ${D_A_CM}
+ fit 1000
+ tclout param 4
+ set z [ lindex \$xspec_tclout 0 ]
+ tclout param 2
+ set T [ lindex \$xspec_tclout 0 ]
+ tclout param 5
+ set norm [ lindex \$xspec_tclout 0 ]
+ newpar 1 0
+ dummyrsp .001 100
+ flux .001 100
+
+ tclout flux
+ set flux [ lindex \$xspec_tclout 0 ]
+ puts "flux(0.01-100kev): \$flux"
+ set rout_cm [ expr \$rout*.492/3600/180*3.14159*\$d_a_cm ]
+ set V [ expr 4./3.*3.14159*\$rout_cm*\$rout_cm*\$rout_cm ]
+ set nenh [ expr \$norm*1E14*4*3.14159*\$d_a_cm*\$d_a_cm*(1+\$z)*(1+\$z)*(1+\$z)*(1+\$z)/\$V ]
+ set d_l_cm [ expr \$d_a_cm*(1+\$z)*(1+\$z) ]
+ set ne_np_ratio 1.2
+ set ne [ expr sqrt(\$nenh*\$ne_np_ratio) ]
+ set lx [ expr \$flux*4*3.14159*\$d_l_cm*\$d_l_cm ]
+ set kb 1.602E-9
+ set ct [ expr 3./2.*(\$ne+\$ne/\$ne_np_ratio)*\$kb*\$T*\$V/\$lx ]
+ set ct_gyr [ expr \$ct/(3600*24*365.25*1E9) ]
+ puts "Cooling_time= \$ct_gyr Gyr"
+}
+
+fit 1000
+calc_cooling_time
+
+tclexit
+_EOF_
+## xspec script }}}
+
+## invoke xspec to calc
+if [ "x${F_WC}" = "xtrue" ]; then
+ printf "\n*** WC: LOW_COUNTS ***\n"
+ printf "*** WARNING: counts_in_0.048R500=${CNT_RC} < 500 ***\n"
+else
+ [ -e "${CT_RES}" ] && mv -f ${CT_RES} ${CT_RES}_bak
+ printf "invoking XSPEC to calculate cooling time ...\n"
+ xspec - ${XSPEC_SCRIPT} | tee ${CT_RES}
+
+ OBS_ID=`grep '"Obs.*ID' ${JSON_FILE} | awk -F':' '{ print $2 }' | tr -d ' ,'`
+ OBJ_NAME=`grep '"Source\ Name' ${JSON_FILE} | awk -F':' '{ print $2 }' | sed -e 's/\ *"//' -e 's/"\ *,$//'`
+ CT=`grep -i '^Cooling_time' ${CT_RES} | awk '{ print $2 }'`
+
+ printf "\n" | tee -a ${CT_RES}
+ printf "# OBS_ID,OBJ_NAME,CT_gyr\n" | tee -a ${CT_RES}
+ printf "# $OBS_ID,$OBJ_NAME,$CT\n" | tee -a ${CT_RES}
+fi
+
+exit 0
+
diff --git a/scripts/ciao_calc_ct_csb.sh b/scripts/ciao_calc_ct_csb.sh
new file mode 100755
index 0000000..586c216
--- /dev/null
+++ b/scripts/ciao_calc_ct_csb.sh
@@ -0,0 +1,40 @@
+#!/bin/sh
+#
+###########################################################
+## Invoke 'ciao_calc_ct.sh' and 'ciao_calc_csb.sh' ##
+## to calculate cooling time and Csb value. ##
+## ##
+## Weitian LI ##
+## 2014/06/18 ##
+###########################################################
+
+BASE_PATH=`dirname $0`
+SCRIPT_CT="${BASE_PATH}/ciao_calc_ct.sh"
+SCRIPT_CSB="${BASE_PATH}/ciao_calc_csb.sh"
+
+echo "### CALCULATE COOLING TIME ###"
+echo "### ${SCRIPT_CT} ###"
+sh "${SCRIPT_CT}"
+
+echo "### CALCULATE CSB VALUE ###"
+echo "### ${SCRIPT_CSB} ###"
+sh "${SCRIPT_CSB}"
+
+echo "### PROCESS RESULTS ###"
+CT_RES="cooling_results.txt"
+CSB_RES="csb_results.txt"
+
+# cooling time
+TITLE_CT=`grep -E '^#\s*[A-Z]+' ${CT_RES} | awk -F',' '{ print $3 }'`
+DATA_CT=`grep -E '^#\s*[0-9]+' ${CT_RES} | awk -F',' '{ print $3 }'`
+
+# Csb
+TITLE_CSB=`grep -E '^#\s*[A-Z]+' ${CSB_RES}`
+DATA_CSB=`grep -E '^#\s*[0-9]+' ${CSB_RES}`
+
+# output data
+echo "${TITLE_CSB},${TITLE_CT}"
+echo "${DATA_CSB},${DATA_CT}"
+
+exit 0
+
diff --git a/scripts/ciao_deproj_spectra_v8.sh b/scripts/ciao_deproj_spectra_v8.sh
new file mode 100755
index 0000000..3818a44
--- /dev/null
+++ b/scripts/ciao_deproj_spectra_v8.sh
@@ -0,0 +1,670 @@
+#!/bin/sh
+#
+trap date INT
+unalias -a
+export GREP_OPTIONS=""
+export LC_COLLATE=C
+#
+###########################################################
+## generate `src' and `bkg' spectra as well as ##
+## RMF/ARFs using `specextract' ##
+## for radial spectra analysis ##
+## to get temperature/abundance profile ##
+## XSPEC model `projct' for deprojection analysis ##
+## ##
+## Ref: Chandra spectrum analysis ##
+## http://cxc.harvard.edu/ciao/threads/extended/ ##
+## Ref: specextract ##
+## http://cxc.harvard.edu/ciao/ahelp/specextract.html ##
+## Ref: CIAO v4.4 region bugs ##
+## http://cxc.harvard.edu/ciao/bugs/regions.html#bug-12187
+## ##
+## LIweitiaNux, July 24, 2012 ##
+###########################################################
+
+###########################################################
+## ChangeLogs:
+## v5, 2012/08/05
+## XFLT0005 not modified as pie end angle
+## add `background renormalization'
+## v6, 2012/08/08, Gu Junhua
+## Modified to using config file to pass parameters
+## Use grppha to rebin the spectrum
+## v7, 2012/08/10, LIweitiaNux
+## account blanksky, local bkg, specified bkg
+## change name to `ciao_deproj_spectra_v*.sh'
+## add `error status'
+##   Improve ${CFG_FILE}
+##   Improve comments
+## v8, 2012/08/14, LIweitiaNux
+## use `cmdline' args instead of `cfg file'
+## add `logging' function
+###########################################################
+
+## about, used in `usage' {{{
+VERSION="v8"
+UPDATE="2012-08-14"
+## about }}}
+
+## usage, help {{{
+case "$1" in
+ -[hH]*|--[hH]*)
+ printf "usage:\n"
+ printf " `basename $0` evt=<evt2_clean> reg=<radial_reg> bkgd=<blank_evt | lbkg_reg | bkg_spec> basedir=<base_dir> nh=<nH> z=<redshift> [ grpcmd=<grppha_cmd> log=<log_file> ]\n"
+ printf "\nversion:\n"
+ printf "${VERSION}, ${UPDATE}\n"
+ exit ${ERR_USG}
+ ;;
+esac
+## usage, help }}}
+
+## default parameters {{{
+# default `event file' which is used to match `blanksky' files
+#DFT_EVT="_NOT_EXIST_"
+DFT_EVT="`ls evt2*_clean.fits`"
+# default `radial region file'
+#DFT_REG_IN="_NOT_EXIST_"
+DFT_REG_IN="rspec.reg"
+# default dir which contains `asols, asol.lis, ...' files
+# DFT_BASEDIR=".."
+DFT_BASEDIR="_NOT_EXIST_"
+# default `group command' for `grppha'
+#DFT_GRP_CMD="group 1 128 2 129 256 4 257 512 8 513 1024 16"
+#DFT_GRP_CMD="group 1 128 4 129 256 8 257 512 16 513 1024 32"
+DFT_GRP_CMD="group min 20"
+# default `log file'
+DFT_LOGFILE="deproj_spectra_`date '+%Y%m%d'`.log"
+
+# default output xspec scripts
+XSPEC_DEPROJ="xspec_deproj.xcm"
+XSPEC_PROJTD="xspec_projected.xcm"
+
+## howto find files in `basedir'
+# default `asol.lis pattern'
+DFT_ASOLIS_PAT="acis*asol?.lis"
+# default `bad pixel filename pattern'
+DFT_BPIX_PAT="acis*repro*bpix?.fits"
+# default `pbk file pattern'
+DFT_PBK_PAT="acis*pbk?.fits"
+# default `msk file pattern'
+DFT_MSK_PAT="acis*msk?.fits"
+## default parameters }}}
+
+## error code {{{
+ERR_USG=1
+ERR_DIR=11
+ERR_EVT=12
+ERR_BKG=13
+ERR_REG=14
+ERR_ASOL=21
+ERR_BPIX=22
+ERR_PBK=23
+ERR_MSK=24
+ERR_BKGTY=31
+ERR_SPEC=32
+## error code }}}
+
+## functions {{{
+# process commandline arguments
+# cmdline arg format: `KEY=VALUE'
+getopt_keyval() {
+ until [ -z "$1" ]
+ do
+ key=${1%%=*} # extract key
+ val=${1#*=} # extract value
+ keyval="${key}=\"${val}\""
+ echo "## getopt: eval '${keyval}'"
+ eval ${keyval}
+ shift # shift, process next one
+ done
+}
+
+## background renormalization (BACKSCAL) {{{
+# renorm background according to particle background
+# energy range: 9.5-12.0 keV (channel: 651-822)
+CH_LOW=651
+CH_HI=822
+pb_flux() {
+ punlearn dmstat
+ COUNTS=`dmstat "$1[channel=${CH_LOW}:${CH_HI}][cols COUNTS]" | grep -i 'sum:' | awk '{ print $2 }'`
+ punlearn dmkeypar
+ EXPTIME=`dmkeypar $1 EXPOSURE echo=yes`
+ BACK=`dmkeypar $1 BACKSCAL echo=yes`
+ # fix `scientific notation' bug for `bc'
+ EXPTIME_B=`echo ${EXPTIME} | sed 's/[eE]/\*10\^/' | sed 's/+//'`
+ BACK_B=`echo "( ${BACK} )" | sed 's/[eE]/\*10\^/' | sed 's/+//'`
+ PB_FLUX=`echo "scale = 16; ${COUNTS} / ${EXPTIME_B} / ${BACK_B}" | bc -l`
+ echo ${PB_FLUX}
+}
+
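+# rescale the background BACKSCAL so that the particle-dominated
+# 9.5-12 keV flux of the background spectrum matches that of the
+# source spectrum (the comparison is done by pb_flux above)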
+bkg_renorm() {
+ # $1: src spectrum, $2: back spectrum
+ PBFLUX_SRC=`pb_flux $1`
+ PBFLUX_BKG=`pb_flux $2`
+ BACK_OLD=`dmkeypar $2 BACKSCAL echo=yes`
+ BACK_OLD_B=`echo "( ${BACK_OLD} )" | sed 's/[eE]/\*10\^/' | sed 's/+//'`
+ BACK_NEW=`echo "scale = 16; ${BACK_OLD_B} * ${PBFLUX_BKG} / ${PBFLUX_SRC}" | bc -l`
+ printf "\`$2': BACKSCAL:\n"
+ printf " ${BACK_OLD} --> ${BACK_NEW}\n"
+ punlearn dmhedit
+ dmhedit infile=$2 filelist=none operation=add \
+ key=BACKSCAL value=${BACK_NEW} comment="old value: ${BACK_OLD}"
+}
+## bkg renorm }}}
+## functions end }}}
+
+## parameters {{{
+# process cmdline args using `getopt_keyval'
+getopt_keyval "$@"
+
+## check log parameters {{{
+if [ ! -z "${log}" ]; then
+ LOGFILE="${log}"
+else
+ LOGFILE=${DFT_LOGFILE}
+fi
+printf "## use logfile: \`${LOGFILE}'\n"
+[ -e "${LOGFILE}" ] && mv -fv ${LOGFILE} ${LOGFILE}_bak
+TOLOG="tee -a ${LOGFILE}"
+echo "process script: `basename $0`" >> ${LOGFILE}
+echo "process date: `date`" >> ${LOGFILE}
+## log }}}
+
+# check given parameters
+# check evt file
+if [ -r "${evt}" ]; then
+ EVT=${evt}
+elif [ -r "${DFT_EVT}" ]; then
+ EVT=${DFT_EVT}
+else
+ read -p "clean evt2 file: " EVT
+ if [ ! -r "${EVT}" ]; then
+ printf "ERROR: cannot access given \`${EVT}' evt file\n"
+ exit ${ERR_EVT}
+ fi
+fi
+printf "## use evt file: \`${EVT}'\n" | ${TOLOG}
+# check given region file(s)
+if [ -r "${reg}" ]; then
+ REG_IN="${reg}"
+elif [ -r "${DFT_REG_IN}" ]; then
+ REG_IN=${DFT_REG_IN}
+else
+ read -p "> radial spec region file: " REG_IN
+ if [ ! -r "${REG_IN}" ]; then
+ printf "ERROR: cannot access given \`${REG_IN}' region file\n"
+ exit ${ERR_REG}
+ fi
+fi
+printf "## use radial reg: \`${REG_IN}'\n" | ${TOLOG}
+# check given bkgd, determine background {{{
+if [ -z "${bkgd}" ]; then
+ read -p "> background (blanksky_evt | lbkg_reg | bkg_spec): " BKGD
+else
+ BKGD=${bkgd}
+fi
+if [ ! -r "${BKGD}" ]; then
+ printf "ERROR: cannot access given \`${BKGD}'\n"
+ exit ${ERR_BKG}
+fi
+printf "## use bkgd: \`${BKGD}'\n" | ${TOLOG}
+# determine bkg type: blanksky, lbkg_reg, bkg_spec ?
+# according to file type first: text / FITS
+# if FITS, then get values of `HDUCLAS1' and `OBJECT'
+if file -bL ${BKGD} | grep -qi 'text'; then
+ printf "## given \`${BKGD}' is a \`text file'\n"
+ printf "## use it as local bkg region file\n"
+ printf "## use *LOCAL BKG SPEC*\n" | ${TOLOG}
+ # just set flags, extract spectrum later
+ USE_LBKG_REG=YES
+ USE_BLANKSKY=NO
+ USE_BKG_SPEC=NO
+elif file -bL ${BKGD} | grep -qi 'FITS'; then
+ printf "## given \`${BKGD}' is a \`FITS file'\n"
+ # get FITS header keyword
+ HDUCLAS1=`dmkeypar ${BKGD} HDUCLAS1 echo=yes`
+ if [ "${HDUCLAS1}" = "EVENTS" ]; then
+ # event file
+ printf "## given file is \`event'\n"
+ # check if `blanksky' or `stowed bkg'
+ BKG_OBJ=`dmkeypar ${BKGD} OBJECT echo=yes`
+ if [ "${BKG_OBJ}" = "BACKGROUND DATASET" ] || [ "${BKG_OBJ}" = "ACIS STOWED" ]; then
+ # valid bkg evt file
+ printf "## given FITS file is a valid bkgrnd file\n"
+ printf "## use *BLANKSKY*\n" | ${TOLOG}
+ USE_BLANKSKY=YES
+ USE_LBKG_REG=NO
+ USE_BKG_SPEC=NO
+ # specify `BLANKSKY'
+ BLANKSKY=${BKGD}
+ else
+ # invalid bkg evt file
+ printf "ERROR: invalid bkg evt file given\n"
+ exit ${ERR_BKGTY}
+ fi
+ elif [ "${HDUCLAS1}" = "SPECTRUM" ]; then
+ # spectrum file
+ printf "## given file is \`spectrum'\n"
+ printf "## use *BKG SPECTRUM*\n" | ${TOLOG}
+ USE_BKG_SPEC=YES
+ USE_BLANKSKY=NO
+ USE_LBKG_REG=NO
+ # specify `BKG_SPEC'
+ BKG_SPEC=${BKGD}
+ else
+ # other type
+        printf "ERROR: unsupported FITS file type given\n"
+ exit ${ERR_BKGTY}
+ fi
+else
+ printf "ERROR: given \`${BKGD}' type UNKNOWN\n"
+ exit ${ERR_BKGTY}
+fi
+# bkgd }}}
+# check given nH
+if [ -z "${nh}" ]; then
+ read -p "> value of nH: " N_H
+else
+ N_H=${nh}
+fi
+printf "## use nH: ${N_H}\n" | ${TOLOG}
+# check given redshift
+if [ -z "${z}" ]; then
+ read -p "> value of redshift: " REDSHIFT
+else
+ REDSHIFT=${z}
+fi
+printf "## use redshift: ${REDSHIFT}\n" | ${TOLOG}
+# check given dir
+if [ -d "${basedir}" ]; then
+ BASEDIR=${basedir}
+elif [ -d "${DFT_BASEDIR}" ]; then
+ BASEDIR=${DFT_BASEDIR}
+else
+ read -p "> basedir (contains asol files): " BASEDIR
+ if [ ! -d "${BASEDIR}" ]; then
+ printf "ERROR: given \`${BASEDIR}' NOT a directory\n"
+ exit ${ERR_DIR}
+ fi
+fi
+# remove the trailing '/'
+BASEDIR=`echo ${BASEDIR} | sed 's/\/*$//'`
+printf "## use basedir: \`${BASEDIR}'\n" | ${TOLOG}
+# check given `grpcmd'
+if [ ! -z "${grpcmd}" ]; then
+ GRP_CMD="${grpcmd}"
+else
+ GRP_CMD="${DFT_GRP_CMD}"
+fi
+printf "## use grppha cmd: \`${GRP_CMD}'\n" | ${TOLOG}
+# rootname for output files
+[ "x${ROOTNAME}" = "x" ] && ROOTNAME="${REG_IN%.reg}"
+printf "## use rootname: \`${ROOTNAME}'\n" | ${TOLOG}
+## parameters }}}
+
+## check needed files {{{
+# check the validity of *pie* regions
+printf "check pie reg validity ...\n"
+INVALID=`cat ${REG_IN} | grep -i 'pie' | awk -F, '{ print $6 }' | tr -d ')' | awk '$1 > 360'`
+if [ "x${INVALID}" != "x" ]; then
+    printf "WARNING: some pie regions' END_ANGLE > 360\n" | ${TOLOG}
+ printf " CIAO v4.4 tools may run into trouble\n"
+fi
+
+# check files in `basedir'
+printf "check needed files in basedir \`${BASEDIR}' ...\n"
+# check asolis files
+ASOLIS=`ls -1 ${BASEDIR}/${DFT_ASOLIS_PAT} | head -n 1`
+if [ -z "${ASOLIS}" ]; then
+ printf "ERROR: cannot find \"${DFT_ASOLIS_PAT}\" in dir \`${BASEDIR}'\n"
+ exit ${ERR_ASOL}
+fi
+printf "## use asolis: \`${ASOLIS}'\n" | ${TOLOG}
+# check badpixel file
+BPIX=`ls -1 ${BASEDIR}/${DFT_BPIX_PAT} | head -n 1`
+if [ -z "${BPIX}" ]; then
+ printf "ERROR: cannot find \"${DFT_BPIX_PAT}\" in dir \`${BASEDIR}'\n"
+ exit ${ERR_BPIX}
+fi
+printf "## use badpixel: \`${BPIX}'\n" | ${TOLOG}
+# check pbk file
+PBK=`ls -1 ${BASEDIR}/${DFT_PBK_PAT} | head -n 1`
+if [ -z "${PBK}" ]; then
+ printf "ERROR: cannot find \"${DFT_PBK_PAT}\" in dir \`${BASEDIR}'\n"
+ exit ${ERR_PBK}
+fi
+printf "## use pbk: \`${PBK}'\n" | ${TOLOG}
+# check msk file
+MSK=`ls -1 ${BASEDIR}/${DFT_MSK_PAT} | head -n 1`
+if [ -z "${MSK}" ]; then
+ printf "ERROR: cannot find \"${DFT_MSK_PAT}\" in dir \`${BASEDIR}'\n"
+ exit ${ERR_MSK}
+fi
+printf "## use msk: \`${MSK}'\n" | ${TOLOG}
+## check files }}}
+
+## process local background {{{
+if [ "${USE_LBKG_REG}" = "YES" ]; then
+ BKG_EVT=${EVT}
+ LBKG_REG=${BKGD}
+ LBKG_REG_CIAO="_tmp_${LBKG_REG}"
+ cp -fv ${LBKG_REG} ${LBKG_REG_CIAO}
+ ## check background region (CIAO v4.4 bug) {{{
+ printf "check background region ...\n"
+ INVALID=`grep -i 'pie' ${LBKG_REG_CIAO} | awk -F, '{ print $6 }' | tr -d ')' | awk '$1 > 360'`
+ if [ "x${INVALID}" != "x" ]; then
+ printf "WARNING: fix for pie region:\n"
+ cat ${LBKG_REG_CIAO}
+ for angle in ${INVALID}; do
+ A_OLD=`echo ${angle} | sed 's/\./\\\./'`
+ A_NEW=`echo ${angle}-360 | bc -l | sed 's/\./\\\./'`
+ sed -i'' "s/${A_OLD}\ *)/${A_NEW})/" ${LBKG_REG_CIAO}
+ done
+ printf " -->"
+ cat ${LBKG_REG_CIAO}
+ printf "======================\n"
+ fi
+ ## background region }}}
+ ## extract local background spectrum
+ printf "extract local background spectrum ...\n"
+ BKG_SPEC="${LBKG_REG%.reg}.pi"
+ punlearn dmextract
+ dmextract infile="${BKG_EVT}[sky=region(${LBKG_REG_CIAO})][bin pi]" \
+ outfile=${BKG_SPEC} wmap="[bin det=8]" clobber=yes
+ rm -fv ${LBKG_REG_CIAO}
+ printf "renormalizing the spectrum later ...\n"
+fi
+## local bkg }}}
+
+## modify the region file, remove the commented and blank lines {{{
+REG_NEW="_new.reg"
+REG_TMP="_tmp.reg"
+[ -f ${REG_NEW} ] && rm -fv ${REG_NEW}
+[ -f ${REG_TMP} ] && rm -fv ${REG_TMP}
+cat ${REG_IN} | sed 's/#.*$//' | grep -Ev '^\s*$' > ${REG_NEW}
+## REG_IN }}}
+
+## `specextract' to extract spectrum {{{
+LINES="`wc -l ${REG_NEW} | cut -d' ' -f1`"
+printf "\n======================================\n"
+printf "TOTAL *${LINES}* regions to process ......\n"
+for i in `seq ${LINES}`; do
+ printf "\n==============================\n"
+ printf ">>> PROCESS REGION ${i} ...\n"
+
+ ## generate corresponding `region' file
+ rm -f ${REG_TMP} ${REG_CIAO} 2> /dev/null
+ head -n ${i} ${REG_NEW} | tail -n 1 > ${REG_TMP}
+ REG_CIAO="${REG_TMP%.reg}_ciao.reg"
+ cp -fv ${REG_TMP} ${REG_CIAO}
+ ## check the validity of *pie* regions {{{
+ INVALID=`grep -i 'pie' ${REG_TMP} | awk -F, '{ print $6 }' | tr -d ')' | awk '$1 > 360'`
+ if [ "x${INVALID}" != "x" ]; then
+ printf "WARNING: fix for pie region:\n"
+ cat ${REG_CIAO}
+ A_OLD=`echo ${INVALID} | sed 's/\./\\\./'`
+ A_NEW=`echo ${INVALID}-360 | bc -l | sed 's/\./\\\./'`
+ sed -i'' "s/${A_OLD}\ *)/${A_NEW})/" ${REG_CIAO}
+ printf " -->\n"
+ cat ${REG_CIAO}
+ fi
+ # check pie region }}}
+
+ ## use `specextract' to extract spectra {{{
+ # NOTE: set `binarfwmap=2' to save the time for generating `ARF'
+ # I have tested that this bin factor has little impact on the results.
+ # NO background response files
+    # NO background spectrum (generated separately below)
+    # NO spectrum grouping (grouped later using `grppha')
+ punlearn specextract
+ specextract infile="${EVT}[sky=region(${REG_CIAO})]" \
+ outroot="r${i}_${ROOTNAME}" bkgfile="" asp="@${ASOLIS}" \
+ pbkfile="${PBK}" mskfile="${MSK}" badpixfile="${BPIX}" \
+ weight=yes correct=no bkgresp=no \
+ energy="0.3:11.0:0.01" channel="1:1024:1" \
+ combine=no binarfwmap=2 \
+ grouptype=NONE binspec=NONE \
+ clobber=yes verbose=2
+ ## specextract }}}
+
+ RSPEC_PI="r${i}_${ROOTNAME}.pi"
+ RSPEC_BKG_PI="${RSPEC_PI%.pi}_bkg.pi"
+ ## background spectrum {{{
+    ## generate the blanksky bkg spectrum ourselves
+ if [ "${USE_BLANKSKY}" = "YES" ]; then
+ # use blanksky as background file
+ printf "extract blanksky bkg spectrum ...\n"
+ punlearn dmextract
+ dmextract infile="${BLANKSKY}[sky=region(${REG_CIAO})][bin pi]" \
+ outfile=${RSPEC_BKG_PI} wmap="[bin det=8]" clobber=yes
+ elif [ "${USE_LBKG_REG}" = "YES" ] || [ "${USE_BKG_SPEC}" = "YES" ]; then
+ # use *local background* or specified background spectrum
+ cp -fv ${BKG_SPEC} ${RSPEC_BKG_PI}
+ fi
+ ## background }}}
+
+ ## bkg renormalization {{{
+ printf "Renormalize background ...\n"
+ bkg_renorm ${RSPEC_PI} ${RSPEC_BKG_PI}
+ ## bkg renorm }}}
+
+ ## group spectrum {{{
+ printf "group spectrum \`${RSPEC_PI}' using \`grppha'\n"
+ RSPEC_GRP_PI="${RSPEC_PI%.pi}_grp.pi"
+ grppha infile="${RSPEC_PI}" outfile="${RSPEC_GRP_PI}" \
+ comm="${GRP_CMD} & exit" clobber=yes > /dev/null
+ ## group }}}
+
+ ## `XFLT####' keywords for XSPEC model `projct' {{{
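+    # projct reads the shell geometry from these header keywords:
+    # XFLT0001/0002 hold the outer semi-major/semi-minor radii (equal here,
+    # since the annuli are circular), XFLT0003 the position angle (0), and
+    # for pie sectors XFLT0004/0005 the start/end angles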
+ printf "update file headers ...\n"
+ punlearn dmhedit
+ if grep -qi 'pie' ${REG_TMP}; then
+ R_OUT="`awk -F',' '{ print $4 }' ${REG_TMP} | tr -d ')'`"
+ A_BEGIN="`awk -F',' '{ print $5 }' ${REG_TMP} | tr -d ')'`"
+ A_END="`awk -F',' '{ print $6 }' ${REG_TMP} | tr -d ')'`"
+ # RSPEC_PI
+ dmhedit infile=${RSPEC_PI} filelist=NONE operation=add \
+ key="XFLT0001" value=${R_OUT}
+ dmhedit infile=${RSPEC_PI} filelist=NONE operation=add \
+ key="XFLT0002" value=${R_OUT}
+ dmhedit infile=${RSPEC_PI} filelist=NONE operation=add \
+ key="XFLT0003" value=0
+ dmhedit infile=${RSPEC_PI} filelist=NONE operation=add \
+ key="XFLT0004" value=${A_BEGIN}
+ dmhedit infile=${RSPEC_PI} filelist=NONE operation=add \
+ key="XFLT0005" value=${A_END}
+ # RSPEC_GRP_PI
+ dmhedit infile=${RSPEC_GRP_PI} filelist=NONE operation=add \
+ key="XFLT0001" value=${R_OUT}
+ dmhedit infile=${RSPEC_GRP_PI} filelist=NONE operation=add \
+ key="XFLT0002" value=${R_OUT}
+ dmhedit infile=${RSPEC_GRP_PI} filelist=NONE operation=add \
+ key="XFLT0003" value=0
+ dmhedit infile=${RSPEC_GRP_PI} filelist=NONE operation=add \
+ key="XFLT0004" value=${A_BEGIN}
+ dmhedit infile=${RSPEC_GRP_PI} filelist=NONE operation=add \
+ key="XFLT0005" value=${A_END}
+ elif grep -qi 'annulus' ${REG_TMP}; then
+ R_OUT="`awk -F',' '{ print $4 }' ${REG_TMP} | tr -d ')'`"
+ # RSPEC_PI
+ dmhedit infile=${RSPEC_PI} filelist=NONE operation=add \
+ key="XFLT0001" value=${R_OUT}
+ dmhedit infile=${RSPEC_PI} filelist=NONE operation=add \
+ key="XFLT0002" value=${R_OUT}
+ dmhedit infile=${RSPEC_PI} filelist=NONE operation=add \
+ key="XFLT0003" value=0
+ # RSPEC_GRP_PI
+ dmhedit infile=${RSPEC_GRP_PI} filelist=NONE operation=add \
+ key="XFLT0001" value=${R_OUT}
+ dmhedit infile=${RSPEC_GRP_PI} filelist=NONE operation=add \
+ key="XFLT0002" value=${R_OUT}
+ dmhedit infile=${RSPEC_GRP_PI} filelist=NONE operation=add \
+ key="XFLT0003" value=0
+ else
+        printf "*** WARNING: region shape NOT matched (pie/annulus expected)!!\n"
+ fi
+ ## `XFLT####' }}}
+
+done # end *for*, `specextract'
+## `specextract' }}}
+
+## clean
+printf "clean ...\n"
+rm -f ${REG_TMP} ${REG_CIAO} 2> /dev/null
+
+
+###########################################################
+## generate a script file for XSPEC ##
+###########################################################
+printf "generate scripts for XSPEC ...\n"
+## xspec script (deproj) {{{
+printf "XSPEC script for deprojection analysis\n"
+[ -e ${XSPEC_DEPROJ} ] && rm -fv ${XSPEC_DEPROJ}
+
+cat >> ${XSPEC_DEPROJ} << _EOF_
+#
+# XSPEC
+# radial spectra (deprojection analysis)
+# model projct*wabs*apec
+#
+# generated by script: \``basename $0`'
+# `date`
+
+statistic chi
+
+# load data
+_EOF_
+
+for i in `seq ${LINES}`; do
+ RSPEC="r${i}_${ROOTNAME}"
+ cat >> ${XSPEC_DEPROJ} << _EOF_
+data ${i}:${i} ${RSPEC}_grp.pi
+response 1:${i} ${RSPEC}.wrmf
+arf 1:${i} ${RSPEC}.warf
+backgrnd ${i} ${RSPEC}_bkg.pi
+
+_EOF_
+done
+
+cat >> ${XSPEC_DEPROJ} << _EOF_
+
+# filter needed energy range
+ignore bad
+ignore **:0.0-0.7,7.0-**
+
+method leven 1000 0.01
+# change abundance standard
+abund ${ABUND}
+xsect bcmc
+cosmo 70 0 0.73
+xset delta 0.01
+systematic 0
+# auto answer
+query yes
+
+# plot related
+setplot energy
+
+# model to use
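+# (the three leading zeros below are the projct mixing-model parameters;
+#  the shell geometry itself comes from the XFLT keywords written into the
+#  spectrum headers by this script)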
+model projct*wabs*apec
+ 0
+ 0
+ 0
+ ${N_H} -0.001 0 0 100000 1e+06
+ 1 0.01 0.008 0.008 64 64
+ 0.5 0.001 0 0 5 5
+ ${REDSHIFT} -0.01 -0.999 -0.999 10 10
+ 1 0.01 0 0 1e+24 1e+24
+_EOF_
+
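+# for every additional data group, link parameters 1-4 (the three projct
+# parameters and nH) and parameter 7 (redshift) to data group 1; kT,
+# abundance and normalization remain free for each shell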
+INPUT_TIMES=`expr ${LINES} - 1`
+for i in `seq ${INPUT_TIMES}`; do
+ cat >> ${XSPEC_DEPROJ} << _EOF_
+= 1
+= 2
+= 3
+= 4
+ 1 0.01 0.008 0.008 64 64
+ 0.5 0.001 0 0 5 5
+= 7
+ 1 0.01 0 0 1e+24 1e+24
+_EOF_
+done
+## xspec script }}}
+
+###########################################################
+## xspec script (projected) {{{
+printf "XSPEC script for projected analysis\n"
+[ -e ${XSPEC_PROJTD} ] && rm -fv ${XSPEC_PROJTD}
+
+cat >> ${XSPEC_PROJTD} << _EOF_
+#
+# XSPEC
+# radial spectra (projected analysis)
+# model wabs*apec
+#
+# generated by script: \``basename $0`'
+# `date`
+
+statistic chi
+
+# load data
+_EOF_
+
+for i in `seq ${LINES}`; do
+ RSPEC="r${i}_${ROOTNAME}"
+ cat >> ${XSPEC_PROJTD} << _EOF_
+data ${i}:${i} ${RSPEC}_grp.pi
+response 1:${i} ${RSPEC}.wrmf
+arf 1:${i} ${RSPEC}.warf
+backgrnd ${i} ${RSPEC}_bkg.pi
+
+_EOF_
+done
+
+cat >> ${XSPEC_PROJTD} << _EOF_
+
+# filter needed energy range
+ignore bad
+ignore **:0.0-0.7,7.0-**
+
+method leven 1000 0.01
+# change abundance standard
+abund ${ABUND}
+xsect bcmc
+cosmo 70 0 0.73
+xset delta 0.01
+systematic 0
+# auto answer
+query yes
+
+# plot related
+setplot energy
+
+# model to use
+model wabs*apec
+ ${N_H} -0.001 0 0 100000 1e+06
+ 1 0.01 0.008 0.008 64 64
+ 0.5 0.001 0 0 5 5
+ ${REDSHIFT} -0.01 -0.999 -0.999 10 10
+ 1 0.01 0 0 1e+24 1e+24
+_EOF_
+
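+# for every additional data group, link nH (parameter 1) and the redshift
+# (parameter 4) to data group 1; kT, abundance and normalization stay free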
+INPUT_TIMES=`expr ${LINES} - 1`
+for i in `seq ${INPUT_TIMES}`; do
+ cat >> ${XSPEC_PROJTD} << _EOF_
+= 1
+ 1 0.01 0.008 0.008 64 64
+ 0.5 0.001 0 0 5 5
+= 4
+ 1 0.01 0 0 1e+24 1e+24
+_EOF_
+done
+
+printf "DONE\n"
+## xspec script }}}
+###########################################################
+
+printf "ALL FINISHED\n"
+
+# vim: set ts=8 sw=4 tw=0 fenc=utf-8 ft=sh: #
diff --git a/scripts/ciao_expcorr_only.sh b/scripts/ciao_expcorr_only.sh
new file mode 100755
index 0000000..526540d
--- /dev/null
+++ b/scripts/ciao_expcorr_only.sh
@@ -0,0 +1,358 @@
+#!/bin/sh
+#
+unalias -a
+export LC_COLLATE=C
+###########################################################
+## make `image' from `evt file' ##
+## make `spectral weight' using `make_instmap_weights' ##
+## use `merge_all' to generate the `exposure map'        ##
+## make `exposure-corrected' image ##
+## and extract `surface brightness profile'              ##
+## ##
+## NOTES: ##
+## only ACIS-I (chip: 0-3) and ACIS-S (chip: 7) supported##
+## `merge_all' conflicts with Heasoft `pget' etc. tools  ##
+## ##
+## LIweitiaNux <liweitianux@gmail.com> ##
+## August 16, 2012 ##
+###########################################################
+
+###########################################################
+## ChangeLogs:
+## v1.1, 2012-08-21, LIweitiaNux
+## fix a bug with `sed'
+## v1.2, 2012-08-21, LIweitiaNux
+## set `ardlib' before process `merge_all'
+###########################################################
+
+## about, used in `usage' {{{
+VERSION="v1.2"
+UPDATE="2012-08-21"
+## about }}}
+
+## error code {{{
+ERR_USG=1
+ERR_DIR=11
+ERR_EVT=12
+ERR_BKG=13
+ERR_REG=14
+ERR_ASOL=21
+ERR_BPIX=22
+ERR_PBK=23
+ERR_MSK=24
+ERR_BKGTY=31
+ERR_SPEC=32
+ERR_DET=41
+ERR_ENG=42
+ERR_CIAO=100
+## error code }}}
+
+## usage, help {{{
+case "$1" in
+ -[hH]*|--[hH]*)
+ printf "usage:\n"
+ printf " `basename $0` evt=<evt_file> energy=<e_start:e_end:e_width> basedir=<base_dir> nh=<nH> z=<redshift> temp=<avg_temperature> abund=<avg_abund> [ logfile=<log_file> ]\n"
+ printf "\nversion:\n"
+ printf "${VERSION}, ${UPDATE}\n"
+ exit ${ERR_USG}
+ ;;
+esac
+## usage, help }}}
+
+## default parameters {{{
+# default `event file' which is used to match `blanksky' files
+#DFT_EVT="_NOT_EXIST_"
+DFT_EVT="`ls evt2*_clean.fits 2> /dev/null`"
+# default dir which contains `asols, asol.lis, ...' files
+# DFT_BASEDIR="_NOT_EXIST_"
+DFT_BASEDIR=".."
+# default `radial region file' to extract surface brightness
+DFT_SBP_REG="_NOT_EXIST_"
+#DFT_SBP_REG="sbprofile.reg"
+# default `energy band' applied to `images' analysis
+# format: `E_START:E_END:E_WIDTH'
+DFT_ENERGY="700:7000:100"
+
+# default `log file'
+DFT_LOGFILE="expcorr_sbp_`date '+%Y%m%d'`.log"
+
+## howto find files in `basedir'
+# default `asol.lis pattern'
+DFT_ASOLIS_PAT="acis*asol?.lis"
+# default `bad pixel filename pattern'
+DFT_BPIX_PAT="acis*repro*bpix?.fits"
+## default parameters }}}
+
+## functions {{{
+# process commandline arguments
+# cmdline arg format: `KEY=VALUE'
+getopt_keyval() {
+ until [ -z "$1" ]
+ do
+ key=${1%%=*} # extract key
+ val=${1#*=} # extract value
+ keyval="${key}=\"${val}\""
+ echo "## getopt: eval '${keyval}'"
+ eval ${keyval}
+ shift # shift, process next one
+ done
+}
+## functions }}}
+
+## check CIAO init {{{
+if [ -z "${ASCDS_INSTALL}" ]; then
+ printf "ERROR: CIAO NOT initialized\n"
+ exit ${ERR_CIAO}
+fi
+
+## XXX: heasoft's `pget' etc. tools conflict with some CIAO tools
+printf "set \$PATH to avoid conflicts between HEAsoft and CIAO\n"
+export PATH="${ASCDS_BIN}:${ASCDS_CONTRIB}:${PATH}"
+printf "## PATH: ${PATH}\n"
+## check CIAO }}}
+
+## parameters {{{
+# process cmdline args using `getopt_keyval'
+getopt_keyval "$@"
+
+## check log parameters {{{
+if [ ! -z "${log}" ]; then
+ LOGFILE="${log}"
+else
+ LOGFILE=${DFT_LOGFILE}
+fi
+printf "## use logfile: \`${LOGFILE}'\n"
+[ -e "${LOGFILE}" ] && mv -fv ${LOGFILE} ${LOGFILE}_bak
+TOLOG="tee -a ${LOGFILE}"
+echo "process script: `basename $0`" >> ${LOGFILE}
+echo "process date: `date`" >> ${LOGFILE}
+## log }}}
+
+# check given parameters
+# check evt file
+if [ -r "${evt}" ]; then
+ EVT=${evt}
+elif [ -r "${DFT_EVT}" ]; then
+ EVT=${DFT_EVT}
+else
+ read -p "clean evt2 file: " EVT
+ if [ ! -r "${EVT}" ]; then
+ printf "ERROR: cannot access given \`${EVT}' evt file\n"
+ exit ${ERR_EVT}
+ fi
+fi
+printf "## use evt file: \`${EVT}'\n" | ${TOLOG}
+# check given region file(s)
+if [ -r "${reg}" ]; then
+ SBP_REG="${reg}"
+elif [ -r "${DFT_SBP_REG}" ]; then
+ SBP_REG=${DFT_SBP_REG}
+fi
+printf "## use reg file(s): \`${SBP_REG}'\n" | ${TOLOG}
+## check given `energy' {{{
+if [ ! -z "${energy}" ]; then
+ ENERGY="${energy}"
+else
+ ENERGY="${DFT_ENERGY}"
+fi
+# split energy variable
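+# accepted forms: "WIDTH", "START:END" or "START:END:WIDTH" (in eV);
+# fields that are not given fall back to the defaults in DFT_ENERGY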
+ENG_N=`echo ${ENERGY} | awk -F':' '{ print NF }'`
+if [ ${ENG_N} -eq 1 ]; then
+ E_START=`echo ${DFT_ENERGY} | awk -F':' '{ print $1 }'`
+ E_END=`echo ${DFT_ENERGY} | awk -F':' '{ print $2 }'`
+ E_WIDTH=${ENERGY}
+elif [ ${ENG_N} -eq 2 ]; then
+ E_START=`echo ${ENERGY} | awk -F':' '{ print $1 }'`
+ E_END=`echo ${ENERGY} | awk -F':' '{ print $2 }'`
+ E_WIDTH=`echo ${DFT_ENERGY} | awk -F':' '{ print $3 }'`
+elif [ ${ENG_N} -eq 3 ]; then
+ E_START=`echo ${ENERGY} | awk -F':' '{ print $1 }'`
+ E_END=`echo ${ENERGY} | awk -F':' '{ print $2 }'`
+ E_WIDTH=`echo ${ENERGY} | awk -F':' '{ print $3 }'`
+else
+ printf "ERROR: invalid energy: \`${ENERGY}'\n"
+ exit ${ERR_ENG}
+fi
+ENG_RANGE="${E_START}:${E_END}"
+printf "## use energy range: \`${ENG_RANGE}' eV\n" | ${TOLOG}
+printf "## use energy width: \`${E_WIDTH}' eV\n" | ${TOLOG}
+## parse energy }}}
+# parameters (nH, z, avg_temp, avg_abund) used to calculate {{{
+# `spectral weights' for `mkinstmap'
+# check given nH
+if [ -z "${nh}" ]; then
+ read -p "> value of nH: " N_H
+else
+ N_H=${nh}
+fi
+printf "## use nH: ${N_H}\n" | ${TOLOG}
+# check given redshift
+if [ -z "${z}" ]; then
+ read -p "> value of redshift: " REDSHIFT
+else
+ REDSHIFT=${z}
+fi
+printf "## use redshift: ${REDSHIFT}\n" | ${TOLOG}
+# check given temperature
+if [ -z "${temp}" ]; then
+ read -p "> object average temperature: " TEMP
+else
+ TEMP=${temp}
+fi
+printf "## use temperature: ${TEMP}\n" | ${TOLOG}
+# check given abundance
+if [ -z "${abund}" ]; then
+ read -p "> object average abundance: " ABUND
+else
+ ABUND=${abund}
+fi
+printf "## use abundance: ${ABUND}\n" | ${TOLOG}
+# `spectral weights' parameters }}}
+# check given dir
+if [ -d "${basedir}" ]; then
+ BASEDIR=${basedir}
+elif [ -d "${DFT_BASEDIR}" ]; then
+ BASEDIR=${DFT_BASEDIR}
+else
+ read -p "> basedir (contains asol files): " BASEDIR
+ if [ ! -d "${BASEDIR}" ]; then
+ printf "ERROR: given \`${BASEDIR}' NOT a directory\n"
+ exit ${ERR_DIR}
+ fi
+fi
+# remove the trailing '/'
+BASEDIR=`echo ${BASEDIR} | sed 's/\/*$//'`
+printf "## use basedir: \`${BASEDIR}'\n" | ${TOLOG}
+## parameters }}}
+
+## check files in `basedir' {{{
+# check asol files
+ASOLIS=`ls -1 ${BASEDIR}/${DFT_ASOLIS_PAT} | head -n 1`
+if [ -z "${ASOLIS}" ]; then
+ printf "ERROR: cannot find \"${DFT_ASOLIS_PAT}\" in dir \`${BASEDIR}'\n"
+ exit ${ERR_ASOL}
+fi
+printf "## use asolis: \`${ASOLIS}'\n"
+## XXX: `merge_all' needs `asol files' in working directory
+printf "link asol files into current dir (\`merge_all' needed) ...\n"
+for f in `cat ${ASOLIS}`; do
+ ln -sv ${BASEDIR}/${f} .
+done
+# check badpixel file
+BPIX=`ls -1 ${BASEDIR}/${DFT_BPIX_PAT} | head -n 1`
+if [ -z "${BPIX}" ]; then
+ printf "ERROR: cannot find \"${DFT_BPIX_PAT}\" in dir \`${BASEDIR}'\n"
+ exit ${ERR_BPIX}
+fi
+printf "## use badpixel: \`${BPIX}'\n" | ${TOLOG}
+## check files }}}
+
+## determine ACIS type {{{
+# consistent with `ciao_procevt'
+punlearn dmkeypar
+DETNAM=`dmkeypar ${EVT} DETNAM echo=yes`
+if echo ${DETNAM} | grep -q 'ACIS-0123'; then
+ printf "## \`DETNAM' (${DETNAM}) has chips 0123\n"
+ printf "## ACIS-I\n"
+ ACIS_TYPE="ACIS-I"
+ CCD="0:3"
+ NEW_DETNAM="ACIS-0123"
+ ROOTNAME="c0-3_e${E_START}-${E_END}"
+elif echo ${DETNAM} | grep -q 'ACIS-[0-6]*7'; then
+ printf "## \`DETNAM' (${DETNAM}) has chip 7\n"
+ printf "## ACIS-S\n"
+ ACIS_TYPE="ACIS-S"
+ CCD="7"
+ NEW_DETNAM="ACIS-7"
+ ROOTNAME="c7_e${E_START}-${E_END}"
+else
+ printf "ERROR: unknown detector type: ${DETNAM}\n"
+ exit ${ERR_DET}
+fi
+## ACIS type }}}
+
+
+## main process {{{
+## set `ardlib' at first
+printf "set \`ardlib' first ...\n"
+punlearn ardlib
+acis_set_ardlib badpixfile="${BPIX}"
+
+## generate `spectral weights' for `instrumental map' {{{
+printf "generate \`spectral weights' for making instrumental map ...\n"
+SPEC_WGT="instmap_weights.txt"
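+# the weights file tabulates the relative contribution of each energy bin
+# for an absorbed thermal spectrum (xswabs*xsapec with the given nH, kT,
+# abundance and redshift); merge_all/mkinstmap use it to build a spectrally
+# weighted instrument map instead of a monochromatic one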
+# convert `eV' to `keV'
+EMIN=`echo ${E_START} | awk '{ print $1/1000 }'`
+EMAX=`echo ${E_END} | awk '{ print $1/1000 }'`
+EWIDTH=`echo ${E_WIDTH} | awk '{ print $1/1000 }'`
+punlearn make_instmap_weights
+make_instmap_weights outfile="${SPEC_WGT}" \
+ model="xswabs.gal*xsapec.ap" \
+ paramvals="gal.nh=${N_H};ap.kt=${TEMP};ap.abundanc=${ABUND};ap.redshift=${REDSHIFT}" \
+ emin="${EMIN}" emax="${EMAX}" ewidth="${EWIDTH}" \
+ abund="grsa" clobber=yes
+## spectral weights }}}
+
+## generate `skyfov'
+# XXX: omit the aspect solution, i.e., do NOT provide the `asol' file;
+# otherwise the size of the evt image would NOT match the `expmap'
+printf "generate skyfov ...\n"
+SKYFOV="_skyfov.fits"
+[ -e "${SKYFOV}" ] && rm -fv ${SKYFOV}
+punlearn skyfov
+skyfov infile="${EVT}" outfile="${SKYFOV}" clobber=yes
+
+## filter by energy band & make image
+printf "filter out events in energy band: \`${ENG_RANGE}' ...\n"
+EVT_E="evt2_${ROOTNAME}.fits"
+if [ ! -r "${EVT_E}" ]; then
+ punlearn dmcopy
+ dmcopy infile="${EVT}[energy=${E_START}:${E_END}]" outfile="${EVT_E}" clobber=yes
+fi
+
+printf "make image ...\n"
+IMG_ORIG="img_${ROOTNAME}.fits"
+punlearn dmcopy
+dmcopy infile="${EVT_E}[sky=region(${SKYFOV}[ccd_id=${CCD}])][bin sky=1]" \
+ outfile="${IMG_ORIG}" clobber=yes
+
+## modify `DETNAM' of image
+printf "modify keyword \`DETNAM' of image -> \`${NEW_DETNAM}'\n"
+punlearn dmhedit
+dmhedit infile="${IMG_ORIG}" filelist=none operation=add \
+ key=DETNAM value="${NEW_DETNAM}"
+
+## get `xygrid' for image
+punlearn get_sky_limits
+get_sky_limits image="${IMG_ORIG}"
+XYGRID=`pget get_sky_limits xygrid`
+printf "## get \`xygrid': \`${XYGRID}'\n" | ${TOLOG}
+
+## for simplicity, use `merge_all' to generate the `exposure map' {{{
+## set `ardlib' again to make sure the matched bpix file specified
+printf "set \`ardlib' again for \`merge_all' ...\n"
+punlearn ardlib
+acis_set_ardlib badpixfile="${BPIX}"
+
+printf "use \`merge_all' to generate \`exposure map' ONLY ...\n"
+EXPMAP="expmap_${ROOTNAME}.fits"
+punlearn merge_all
+merge_all evtfile="${EVT_E}" asol="@${ASOLIS}" \
+ chip="${CCD}" xygrid="${XYGRID}" \
+ energy="${SPEC_WGT}" expmap="${EXPMAP}" \
+ dtffile="" refcoord="" merged="" expcorr="" \
+ clobber=yes
+## `merge_all' }}}
+
+## apply exposure correction
+printf "use \`dmimgcalc' to apply \`exposure correction' ...\n"
+IMG_EXPCORR="img_expcorr_${ROOTNAME}.fits"
+punlearn dmimgcalc
+dmimgcalc infile="${IMG_ORIG}" infile2="${EXPMAP}" \
+ outfile="${IMG_EXPCORR}" operation=div clobber=yes
+
+
+## main }}}
+
+exit 0
+
diff --git a/scripts/ciao_genregs_v1.sh b/scripts/ciao_genregs_v1.sh
new file mode 100755
index 0000000..643acba
--- /dev/null
+++ b/scripts/ciao_genregs_v1.sh
@@ -0,0 +1,277 @@
+#!/bin/sh
+unalias -a
+export LC_COLLATE=C
+
+#####################################################################
+## generate 'radial spectral profile' regions
+## and 'radial surface brightness profile' regions
+##
+## ChangeLogs:
+## v1, 2013/10/12, LIweitiaNux
+## split from 'ciao_expcorr_sbp_v3.sh'
+#####################################################################
+
+SCRIPT_PATH=`readlink -f $0`
+SCRIPT_DIR=`dirname ${SCRIPT_PATH}`
+XCENTROID_SCRIPT="chandra_xcentroid.sh"
+GEN_SPCREG_SCRIPT="chandra_genspcreg.sh"
+GEN_SBPREG_SCRIPT="chandra_gensbpreg.sh"
+
+## about, used in `usage' {{{
+VERSION="v1"
+UPDATE="2013-10-12"
+## about }}}
+
+## err code {{{
+ERR_USG=1
+ERR_DIR=11
+ERR_EVT=12
+ERR_BKG=13
+ERR_REG=14
+ERR_ASOL=21
+ERR_BPIX=22
+ERR_PBK=23
+ERR_MSK=24
+ERR_BKGTY=31
+ERR_SPEC=32
+ERR_DET=41
+ERR_ENG=42
+ERR_CIAO=100
+## error code }}}
+
+## usage {{{
+case "$1" in
+ -[hH]*|--[hH]*)
+ printf "usage:\n"
+ printf " `basename $0` evt=<evt2_clean> reg_in=<reg_in> bkgd=<bkgd_spec> ds9=<Yes|no>\n"
+ printf "\nversion:\n"
+ printf "${VERSION}, ${UPDATE}\n"
+ exit ${ERR_USG}
+ ;;
+esac
+## usage }}}
+
+## link needed files {{{
+BKGD_FILE=`ls ../bkg/bkgcorr_bl*.pi 2> /dev/null | head -n 1`
+if [ -r "${BKGD_FILE}" ]; then
+ ln -svf ${BKGD_FILE} .
+fi
+ASOL_FILE=`ls ../pcad*_asol?.fits 2> /dev/null`
+if [ -r "${ASOL_FILE}" ]; then
+ ln -svf ${ASOL_FILE} .
+fi
+CELL_REG_FILE=`ls ../evt/celld*.reg 2> /dev/null | grep -v 'orig'`
+if [ -r "${CELL_REG_FILE}" ]; then
+ ln -svf ${CELL_REG_FILE} .
+fi
+# }}}
+
+## default parameters {{{
+## clean evt2 file
+DFT_EVT=`ls evt2*_clean.fits 2> /dev/null`
+## the repro dir
+DFT_BASEDIR=".."
+# default `asol file'
+ASOL="`ls pcadf*_asol1.fits 2> /dev/null | head -n 1`"
+
+## energy range
+# format: `E_START:E_END:E_WIDTH'
+DFT_ENERGY=700:7000:100
+E_START=`echo ${DFT_ENERGY} | awk -F':' '{ print $1 }'`
+E_END=`echo ${DFT_ENERGY} | awk -F':' '{ print $2 }'`
+
+## log file
+DFT_LOGFILE="genreg_`date '+%Y%m%d'`.log"
+
+## background spectra
+DFT_BKGD=`ls bkgcorr_bl*.pi | head -n 1`
+## default parameters }}}
+
+## functions {{{
+getopt_keyval() {
+ until [ -z "$1" ]
+ do
+ key=${1%%=*} # extract key
+ val=${1#*=} # extract value
+ keyval="${key}=\"${val}\""
+ echo "## getopt: eval '${keyval}'"
+ eval ${keyval}
+ shift # shift, process next one
+ done
+}
+## functions }}}
+
+## check ciao init and set path to solve conflict with heasoft {{{
+if [ -z "${ASCDS_INSTALL}" ]; then
+ printf "ERROR: CIAO NOT initialized\n"
+ exit ${ERR_CIAO}
+fi
+
+## XXX: heasoft's `pget' etc. tools conflict with some CIAO tools
+printf "set \$PATH to avoid conflicts between HEAsoft and CIAO\n"
+export PATH="${ASCDS_BIN}:${ASCDS_CONTRIB}:${PATH}"
+printf "## PATH: ${PATH}\n"
+## check ciao & heasoft }}}
+
+## parameters {{{
+getopt_keyval "$@"
+
+## log file
+LOGFILE="not_exist"
+
+## check evt file
+
+if [ -r "${evt}" ]; then
+ EVT=${evt}
+elif [ -r "${DFT_EVT}" ]; then
+ EVT=${DFT_EVT}
+else
+ read -p "clean evt2 file: " EVT
+ if [ ! -r "${EVT}" ]; then
+ printf "ERROR: cannot access given \`${EVT}' evt file\n"
+ exit ${ERR_EVT}
+ fi
+fi
+
+## check original sbp region
+if [ -r "${reg_in}" ]; then
+ REG_IN="${reg_in}"
+else
+ read -p "input region file: " REG_IN
+ if [ ! -r "${REG_IN}" ]; then
+        printf "ERROR: cannot access given \`${REG_IN}' region file\n"
+ exit ${ERR_REG}
+ fi
+fi
+
+# check given basedir
+if [ -d "${basedir}" ]; then
+ BASEDIR=${basedir}
+elif [ -d "${DFT_BASEDIR}" ]; then
+ BASEDIR=${DFT_BASEDIR}
+else
+ read -p "> basedir (contains asol files): " BASEDIR
+ if [ ! -d "${BASEDIR}" ]; then
+ printf "ERROR: given \`${BASEDIR}' NOT a directory\n"
+ exit ${ERR_DIR}
+ fi
+fi
+# remove the trailing '/'
+BASEDIR=`echo ${BASEDIR} | sed 's/\/*$//'`
+
+# background spectrum
+if [ -r "${bkgd}" ] ;then
+ BKGD=${bkgd}
+elif [ -r "${DFT_BKGD}" ] ; then
+ BKGD="${DFT_BKGD}"
+ #ln -svf ${DFT_BKGD} .
+else
+    read -p "> background spectrum file: " BKGD
+    if [ ! -r "${BKGD}" ] ; then
+        printf "ERROR: cannot access given \`${BKGD}' background spectrum file\n"
+ exit ${ERR_BKG}
+ fi
+fi
+
+## ds9: flag to determine whether to use ds9 to check the centroid and regions
+if [ ! -z "${ds9}" ]; then
+ case "${ds9}" in
+ [nN][oO]|[fF]*)
+ F_DS9="NO"
+ ;;
+ *)
+ F_DS9="YES"
+ ;;
+ esac
+else
+ F_DS9="YES"
+fi
+## parameters }}}
+
+## determine ACIS type {{{
+# consistent with `ciao_procevt'
+punlearn dmkeypar
+DETNAM=`dmkeypar ${EVT} DETNAM echo=yes`
+if echo ${DETNAM} | grep -q 'ACIS-0123'; then
+ printf "## \`DETNAM' (${DETNAM}) has chips 0123\n"
+ printf "## ACIS-I\n"
+ ACIS_TYPE="ACIS-I"
+ CCD="0:3"
+ NEW_DETNAM="ACIS-0123"
+ ROOTNAME="c0-3_e${E_START}-${E_END}"
+elif echo ${DETNAM} | grep -q 'ACIS-[0-6]*7'; then
+ printf "## \`DETNAM' (${DETNAM}) has chip 7\n"
+ printf "## ACIS-S\n"
+ ACIS_TYPE="ACIS-S"
+ CCD="7"
+ NEW_DETNAM="ACIS-7"
+ ROOTNAME="c7_e${E_START}-${E_END}"
+else
+ printf "ERROR: unknown detector type: ${DETNAM}\n"
+ exit ${ERR_DET}
+fi
+## ACIS type }}}
+
+## filter by energy band
+printf "filter out events in energy band: \`${E_START}:${E_END}' ...\n"
+EVT_E="evt2_${ROOTNAME}.fits"
+if [ ! -r "${EVT_E}" ]; then
+ punlearn dmcopy
+ dmcopy infile="${EVT}[energy=${E_START}:${E_END}]" outfile="${EVT_E}" clobber=yes
+fi
+
+printf "======== X-RAY CENTROID =======\n"
+CMD="${SCRIPT_DIR}/${XCENTROID_SCRIPT} evt=${EVT} reg=${REG_IN} conv=yes 2>&1 | tee xcentroid.dat"
+printf "CMD: $CMD\n"
+${SCRIPT_DIR}/${XCENTROID_SCRIPT} evt=${EVT} reg=${REG_IN} conv=yes 2>&1 | tee xcentroid.dat
+X=`grep '(X,Y)' xcentroid.dat | tr -d ' XY():' | awk -F',' '{ print $2 }'`
+Y=`grep '(X,Y)' xcentroid.dat | tr -d ' XY():' | awk -F',' '{ print $3 }'`
+CNTRD_WCS_REG="centroid_wcs.reg"
+CNTRD_PHY_REG="centroid_phy.reg"
+printf "## X centroid: ($X,$Y)\n"
+if [ "${F_DS9}" = "YES" ]; then
+ printf "check the X centroid ...\n"
+ ds9 ${EVT_E} -regions ${CNTRD_PHY_REG} -cmap sls -bin factor 4
+fi
+X0=$X
+Y0=$Y
+X=`grep -i 'point' ${CNTRD_PHY_REG} | head -n 1 | tr -d 'a-zA-Z() ' | awk -F',' '{ print $1 }'`
+Y=`grep -i 'point' ${CNTRD_PHY_REG} | head -n 1 | tr -d 'a-zA-Z() ' | awk -F',' '{ print $2 }'`
+if [ "x${X}" != "x${X0}" ] || [ "x${Y}" != "x${Y0}" ]; then
+ printf "## X CENTROID CHANGED -> ($X,$Y)\n"
+ # update ${CNTRD_WCS_REG}
+ printf "update ${CNTRD_WCS_REG} ...\n"
+ rm -f ${CNTRD_WCS_REG}
+ punlearn dmcoords
+ dmcoords infile="${EVT}" asolfile="${ASOL}" option=sky x=${X} y=${Y}
+ RA=`pget dmcoords ra`
+ DEC=`pget dmcoords dec`
+ echo "point(${RA},${DEC})" > ${CNTRD_WCS_REG}
+fi
+printf "======== X-RAY CENTROID FINISHED =======\n\n"
+
+SPC_REG=rspec.reg
+SBP_REG=sbprofile.reg
+
+printf "======== GENERATE SBPROFILE REGIONS =======\n"
+CMD="${SCRIPT_DIR}/${GEN_SBPREG_SCRIPT} ${EVT} ${EVT_E} ${X} ${Y} ${BKGD} ${SBP_REG}"
+printf "CMD: ${CMD}\n"
+${SCRIPT_DIR}/${GEN_SBPREG_SCRIPT} ${EVT} ${EVT_E} ${X} ${Y} ${BKGD} ${SBP_REG}
+if [ "${F_DS9}" = "YES" ]; then
+ printf "check SBP regions ...\n"
+ ds9 ${EVT_E} -regions ${SBP_REG} -cmap sls -bin factor 4
+fi
+printf "======== GENERATE SBPROFILE REGIONS FINISHED =======\n\n"
+
+printf "======== GENERATE SPECTRUM REGIONS =======\n"
+CMD="${SCRIPT_DIR}/${GEN_SPCREG_SCRIPT} ${EVT} ${EVT_E} ${BKGD} ${X} ${Y} ${SPC_REG}"
+printf "CMD: ${CMD}\n"
+${SCRIPT_DIR}/${GEN_SPCREG_SCRIPT} ${EVT} ${EVT_E} ${BKGD} ${X} ${Y} ${SPC_REG}
+if [ "${F_DS9}" = "YES" ]; then
+ printf "check SPC regions ...\n"
+ ds9 ${EVT_E} -regions ${SPC_REG} -cmap sls -bin factor 4
+fi
+printf "======== GENERATE SPECTRUM REGIONS FINISHED =======\n\n"
+
+exit 0
+
diff --git a/scripts/ciao_procevt_v2.sh b/scripts/ciao_procevt_v2.sh
new file mode 100755
index 0000000..6cfe45d
--- /dev/null
+++ b/scripts/ciao_procevt_v2.sh
@@ -0,0 +1,182 @@
+#!/bin/sh
+#
+###########################################################
+## process `evt2 file' generated by `chandra_repro' ##
+## to produce a `clean evt2 file' ##
+## (remove point source and flares) ##
+## for later image and spectral analysis ##
+## ##
+## NOTES: ##
+## based on previous `ciao_process_evt2.sh' ##
+## support ACIS-I(chip: 0-3) and ACIS-S(chip: 7) ##
+## determine by check `DETNAM' for chip number ##
+## if `DETNAM' has `0123', then `ACIS-I' ##
+## if `DETNAM' has `7', then `ACIS-S' ##
+## ##
+## LIweitiaNux <liweitianux@gmail.com> ##
+## August 16, 2012 ##
+###########################################################
+
+###########################################################
+## ChangeLogs:
+## v2.1, 2012/08/16, LIweitiaNux
+## improve invoke `chips', run it in a separate terminal
+###########################################################
+
+## about, used in `usage' {{{
+VERSION="v2"
+UPDATE="2012-08-16"
+## about }}}
+
+## error code {{{
+ERR_USG=1
+ERR_DIR=11
+ERR_EVT=12
+ERR_BKG=13
+ERR_REG=14
+ERR_ASOL=21
+ERR_BPIX=22
+ERR_PBK=23
+ERR_MSK=24
+ERR_BKGTY=31
+ERR_SPEC=32
+ERR_DET=41
+## error code }}}
+
+## usage, help {{{
+case "$1" in
+ -[hH]*|--[hH]*)
+ printf "usage:\n"
+ printf " `basename $0` evt=<raw_evt_file>\n"
+ printf "\nversion:\n"
+ printf "${VERSION}, ${UPDATE}\n"
+ exit ${ERR_USG}
+ ;;
+esac
+## usage, help }}}
+
+## default parameters {{{
+# default `event file' which is used to match `blanksky' files
+#DFT_EVT="_NOT_EXIST_"
+DFT_EVT="`ls acisf?????*_repro_evt2.fits 2> /dev/null`"
+## default parameters }}}
+
+## functions {{{
+# process commandline arguments
+# cmdline arg format: `KEY=VALUE'
+getopt_keyval() {
+ until [ -z "$1" ]
+ do
+ key=${1%%=*} # extract key
+ val=${1#*=} # extract value
+ keyval="${key}=\"${val}\""
+ echo "## getopt: eval '${keyval}'"
+ eval ${keyval}
+ shift # shift, process next one
+ done
+}
+## functions }}}
+
+## parameters {{{
+# process cmdline args using `getopt_keyval'
+getopt_keyval "$@"
+
+# check given parameters
+# check evt file
+if [ -r "${evt}" ]; then
+ EVT=${evt}
+elif [ -r "${DFT_EVT}" ]; then
+ EVT=${DFT_EVT}
+else
+ read -p "evt2 file: " EVT
+ if ! [ -r "${EVT}" ]; then
+ printf "ERROR: cannot access given \`${EVT}' evt file\n"
+ exit ${ERR_EVT}
+ fi
+fi
+printf "## use evt file: \`${EVT}'\n"
+## parameters }}}
+
+## determine ACIS type {{{
+punlearn dmkeypar
+DETNAM=`dmkeypar ${EVT} DETNAM echo=yes`
+if echo ${DETNAM} | grep -q 'ACIS-0123'; then
+ printf "## \`DETNAM' (${DETNAM}) has chips 0123\n"
+ printf "## ACIS-I\n"
+ ACIS_TYPE="ACIS-I"
+ CCD="0:3"
+ ROOTNAME="evt2_c`echo ${CCD} | tr ':' '-'`"
+elif echo ${DETNAM} | grep -q 'ACIS-[0-6]*7'; then
+ printf "## \`DETNAM' (${DETNAM}) has chip 7\n"
+ printf "## ACIS-S\n"
+ ACIS_TYPE="ACIS-S"
+ CCD="7"
+ ROOTNAME="evt2_c${CCD}"
+else
+ printf "ERROR: unknown detector type: ${DETNAM}\n"
+ exit ${ERR_DET}
+fi
+## ACIS type }}}
+
+## main process {{{
+printf "filter raw evt2 file by ccd_id ...\n"
+EVT2_ORIG="${ROOTNAME}_orig.fits"
+punlearn dmcopy
+dmcopy infile="${EVT}[ccd_id=${CCD}]" outfile=${EVT2_ORIG} clobber=yes
+
+# detect point sources
+printf "using \`celldetect' to find the point sources ...\n"
+CELLD="celld_${ROOTNAME}"
+[ -e "${CELLD}.reg" ] && mv -fv ${CELLD}.reg ${CELLD}.reg_bak
+punlearn celldetect
+celldetect infile=${EVT2_ORIG} outfile="${CELLD}.fits" \
+ regfile="${CELLD}.reg" clobber=yes
+
+printf "check the result of \`celldetect' ...\n"
+printf "modify if necessary and save as the same name, \`${CELLD}.reg'\n"
+cp -fv ${CELLD}.reg ${CELLD}_orig.reg
+ds9 ${EVT2_ORIG} -region ${CELLD}.reg
+
+EVT2_RMSRCS="${ROOTNAME}_rmsrcs.fits"
+punlearn dmcopy
+dmcopy infile="${EVT2_ORIG}[exclude sky=region(${CELLD}.reg)]" \
+ outfile=${EVT2_RMSRCS} clobber=yes
+
+LC_REG="ex_bkg.reg"
+printf "filter flares ...\n"
+printf "select a large region covering the source (it will be excluded) and save as \`${LC_REG}'\n"
+touch ${LC_REG}
+ds9 ${EVT2_RMSRCS}
+
+printf "create the lightcurve ...\n"
+LC="${LC_REG%.reg}.lc"
+[ -e "${LC}" ] && mv -fv ${LC} ${LC}_bak
+punlearn dmextract
+dmextract infile="${EVT2_RMSRCS}[exclude sky=region(${LC_REG})][bin time=::200]" \
+ outfile="${LC}" opt=ltc1 clobber=yes
+
+# generate a script for `chips'
+# for convenience and backup
+LC_SCRIPT="_${LC%.lc}.chips"
+LC_SCALE=1.2
+GTI="${LC%.lc}.gti"
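+# lc_clean (from the CIAO `lightcurves' module) filters the 200-second
+# binned lightcurve around its mean rate; the second call writes a GTI
+# keeping only the quiescent bins, with `scale' controlling how far the
+# rate may deviate from the mean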
+cat >> ${LC_SCRIPT} << _EOF_
+from lightcurves import *
+lc_clean("${LC}")
+lc_clean("${LC}", scale=${LC_SCALE}, outfile="${GTI}")
+print_window("${LC%.lc}_lc.jpg", ["format", "jpg", "clobber", "True"])
+_EOF_
+
+printf "generate GTI ...\n"
+# [-x] let chips run in a separate terminal
+chips -x ${LC_SCRIPT}
+
+printf "remove the background flare ...\n"
+EVT2_CLEAN="${ROOTNAME}_clean.fits"
+punlearn dmcopy
+dmcopy infile="${EVT2_RMSRCS}[@${GTI}]" outfile=${EVT2_CLEAN} clobber=yes
+## main process }}}
+
+printf "FINISHED\n"
+
+
diff --git a/scripts/ciao_r500avgt_v3.sh b/scripts/ciao_r500avgt_v3.sh
new file mode 100755
index 0000000..3c9be08
--- /dev/null
+++ b/scripts/ciao_r500avgt_v3.sh
@@ -0,0 +1,539 @@
+#!/bin/sh
+#
+unalias -a
+export LC_COLLATE=C
+###########################################################
+## script to extract spectrum and prepare needed files ##
+## for calculating the `average temperature' ##
+## within (0.1-0.5 r500) region ##
+## ##
+## NOTE: ##
+## 1) r500 default in unit `pixel', if in `kpc', ##
+## then `redshift z' and `calculator' are needed. ##
+## 2) `background process' is same as `deproj_spectra' ##
+## which supports `spectrum', `local' and `blanksky' ##
+## 3) ARF/RMF files either provided or use the ARF/RMF ##
+## of the outmost region ##
+## ##
+## LIweitiaNux <liweitianux@gmail.com> ##
+## August 22, 2012 ##
+###########################################################
+
+###########################################################
+## ChangeLogs
+## v1.1, 2012/08/26, LIweitiaNux
+## modify `KPC_PER_PIX', able to use the newest version `calc_distance'
+## v1.2, 2012/08/26, LIweitiaNux
+## fix a bug with `DFT_BKGD'
+## v2.0, 2012/09/04, LIweitiaNux
+## add parameter `inner' and `outer' to adjust the region range
+## modify parameter `r500' to take `kpc' as the default unit
+## v2.1, 2012/10/05, LIweitiaNux
+## change `DFT_GRP_CMD' to `group 1 128 4 ...'
+## v3.0, 2013/02/09, LIweitiaNux
+## modify for new process
+###########################################################
+
+## cosmology calculator {{{
+## XXX: MODIFY THIS TO YOUR OWN CASE
+## and make sure this `calc' is executable
+## NOTES: use `$HOME' instead of `~' in path
+COSCALC="`which cosmo_calc calc_distance | head -n 1`"
+# COSCALC="_path_to_calc_distance_"
+# COSCALC="$HOME/bin/mass/calc_distance"
+if [ -z "${COSCALC}" ] || [ ! -x ${COSCALC} ]; then
+ printf "ERROR: \`COSCALC: ${COSCALC}' neither specified nor executable\n"
+ exit 255
+fi
+## }}}
+
+## about, used in `usage' {{{
+VERSION="v3.0"
+UPDATE="2013-02-09"
+## about }}}
+
+## error code {{{
+ERR_USG=1
+ERR_DIR=11
+ERR_EVT=12
+ERR_BKG=13
+ERR_REG=14
+ERR_JSON=15
+ERR_ASOL=21
+ERR_BPIX=22
+ERR_PBK=23
+ERR_MSK=24
+ERR_BKGTY=31
+ERR_SPEC=32
+ERR_ARF=51
+ERR_RMF=52
+ERR_UNI=61
+## error code }}}
+
+## usage, help {{{
+case "$1" in
+ -[hH]*|--[hH]*)
+ printf "usage:\n"
+ printf " `basename $0` evt=<evt2_clean> r500=<r500_kpc> basedir=<basedir> info=<info_json> inner=<inner_val> outer=<outer_val> regin=<input_reg> regout=<output_reg> bkgd=<blank_evt|lbkg_reg|bkg_spec> nh=<nH> z=<redshift> arf=<warf_file> rmf=<wrmf_file> [ grpcmd=<grppha_cmd> log=<log_file> ]\n"
+ printf "\nversion:\n"
+ printf "${VERSION}, ${UPDATE}\n"
+ exit ${ERR_USG}
+ ;;
+esac
+## usage, help }}}
+
+## default parameters {{{
+# default `event file' which is used to match `blanksky' files
+#DFT_EVT="_NOT_EXIST_"
+DFT_EVT="`ls evt2*_clean.fits 2> /dev/null`"
+# default `bkgd', use `bkgcorr_blanksky*' corrected bkg spectrum
+DFT_BKGD="`ls bkgcorr_blanksky_*.pi 2> /dev/null`"
+# default basedir
+DFT_BASEDIR="../.."
+# default `radial region file'
+#DFT_REG_IN="_NOT_EXIST_"
+DFT_REG_IN="rspec.reg"
+# default region range (0.1-0.5 R500)
+DFT_INNER="0.1"
+DFT_OUTER="0.5"
+# default ARF/RMF, the one of the outmost region
+DFT_ARF="`ls -1 r?_*.warf 2> /dev/null | tail -n 1`"
+DFT_RMF="`ls -1 r?_*.wrmf 2> /dev/null | tail -n 1`"
+
+# default `group command' for `grppha'
+DFT_GRP_CMD="group min 25"
+#DFT_GRP_CMD="group 1 128 2 129 256 4 257 512 8 513 1024 16"
+#DFT_GRP_CMD="group 1 128 4 129 256 8 257 512 16 513 1024 32"
+# default `log file'
+DFT_LOGFILE="r500avgt_`date '+%Y%m%d%H'`.log"
+
+# default JSON pattern
+DFT_JSON_PAT="*_INFO.json"
+## default parameters }}}
+
+## functions {{{
+# process commandline arguments
+# cmdline arg format: `KEY=VALUE'
+getopt_keyval() {
+ until [ -z "$1" ]
+ do
+ key=${1%%=*} # extract key
+ val=${1#*=} # extract value
+ keyval="${key}=\"${val}\""
+ echo "## getopt: eval '${keyval}'"
+ eval ${keyval}
+ shift # shift, process next one
+ done
+}
+
+## background renormalization (BACKSCAL) {{{
+# renorm background according to particle background
+# energy range: 9.5-12.0 keV (channel: 651-822)
+CH_LOW=651
+CH_HI=822
+pb_flux() {
+ punlearn dmstat
+ COUNTS=`dmstat "$1[channel=${CH_LOW}:${CH_HI}][cols COUNTS]" | grep -i 'sum:' | awk '{ print $2 }'`
+ punlearn dmkeypar
+ EXPTIME=`dmkeypar $1 EXPOSURE echo=yes`
+ BACK=`dmkeypar $1 BACKSCAL echo=yes`
+ # fix `scientific notation' bug for `bc'
+ EXPTIME_B=`echo ${EXPTIME} | sed 's/[eE]/\*10\^/' | sed 's/+//'`
+ BACK_B=`echo "( ${BACK} )" | sed 's/[eE]/\*10\^/' | sed 's/+//'`
+ PB_FLUX=`echo "scale = 16; ${COUNTS} / ${EXPTIME_B} / ${BACK_B}" | bc -l`
+ echo ${PB_FLUX}
+}
+
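+# rescale the background BACKSCAL so that the particle-dominated
+# 9.5-12 keV flux of the background spectrum matches that of the
+# source spectrum (the comparison is done by pb_flux above)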
+bkg_renorm() {
+ # $1: src spectrum, $2: back spectrum
+ PBFLUX_SRC=`pb_flux $1`
+ PBFLUX_BKG=`pb_flux $2`
+ BACK_OLD=`dmkeypar $2 BACKSCAL echo=yes`
+ BACK_OLD_B=`echo "( ${BACK_OLD} )" | sed 's/[eE]/\*10\^/' | sed 's/+//'`
+ BACK_NEW=`echo "scale = 16; ${BACK_OLD_B} * ${PBFLUX_BKG} / ${PBFLUX_SRC}" | bc -l`
+ printf "\`$2': BACKSCAL:\n"
+ printf " ${BACK_OLD} --> ${BACK_NEW}\n"
+ punlearn dmhedit
+ dmhedit infile=$2 filelist=none operation=add \
+ key=BACKSCAL value=${BACK_NEW} comment="old value: ${BACK_OLD}"
+}
+## bkg renorm }}}
+## functions end }}}
+
+## parameters {{{
+# process cmdline args using `getopt_keyval'
+getopt_keyval "$@"
+
+## check log parameters {{{
+if [ ! -z "${log}" ]; then
+ LOGFILE="${log}"
+else
+ LOGFILE=${DFT_LOGFILE}
+fi
+printf "## use logfile: \`${LOGFILE}'\n"
+[ -e "${LOGFILE}" ] && mv -fv ${LOGFILE} ${LOGFILE}_bak
+TOLOG="tee -a ${LOGFILE}"
+echo "process script: `basename $0`" >> ${LOGFILE}
+echo "process date: `date`" >> ${LOGFILE}
+## log }}}
+
+# check given parameters
+if [ -d "${basedir}" ]; then
+ BASEDIR=${basedir}
+else
+ BASEDIR=${DFT_BASEDIR}
+fi
+if [ ! -z "${json}" ] && [ -r "${BASEDIR}/${json}" ]; then
+ JSON_FILE="${BASEDIR}/${json}"
+elif ls ${BASEDIR}/${DFT_JSON_PAT} > /dev/null 2>&1; then
+ JSON_FILE=`ls ${BASEDIR}/${DFT_JSON_PAT}`
+else
+ read -p "> JSON_file: " JSON_FILE
+ if [ ! -r "${JSON_FILE}" ]; then
+ printf "ERROR: cannot access given \`${JSON_FILE}'\n"
+ exit ${ERR_JSON}
+ fi
+fi
+printf "## use json_file: \`${JSON_FILE}'\n" | ${TOLOG}
+
+# process `nh' and `redshift' {{{
+NH_JSON=`grep '"nH' ${JSON_FILE} | sed 's/.*"nH.*":\ //' | sed 's/\ *,$//'`
+Z_JSON=`grep '"redshift' ${JSON_FILE} | sed 's/.*"redshift.*":\ //' | sed 's/\ *,$//'`
+printf "## get nh: \`${NH_JSON}' (from \`${JSON_FILE}')\n" | ${TOLOG}
+printf "## get redshift: \`${Z_JSON}' (from \`${JSON_FILE}')\n" | ${TOLOG}
+## if `nh' and `redshift' supplied in cmdline, then use them
+if [ ! -z "${nh}" ]; then
+ N_H=${nh}
+else
+ N_H=${NH_JSON}
+fi
+# redshift
+if [ ! -z "${z}" ]; then
+ REDSHIFT=${z}
+else
+ REDSHIFT=${Z_JSON}
+fi
+printf "## use nH: ${N_H}\n" | ${TOLOG}
+printf "## use redshift: ${REDSHIFT}\n" | ${TOLOG}
+# nh & redshift }}}
+
+# region range {{{
+if [ ! -z "${inner}" ]; then
+ INNER="${inner}"
+else
+ INNER=${DFT_INNER}
+fi
+if [ ! -z "${outer}" ]; then
+ OUTER="${outer}"
+else
+ OUTER=${DFT_OUTER}
+fi
+printf "## region range: (${INNER} - ${OUTER} R500)\n" | ${TOLOG}
+# range }}}
+
+# process `r500' {{{
+R500_RAW=`grep '"R500.*kpc' ${JSON_FILE} | sed 's/.*"R500.*":\ //' | sed 's/\ *,$//'`
+if [ ! -z "${r500}" ]; then
+ R500_RAW=${r500}
+fi
+if [ -z "${R500_RAW}" ]; then
+    printf "## input R500 followed by its unit, e.g.: 800kpc, 400pix\n"
+ read -p "> value of \`R500' (in pixel/kpc): " R500_RAW
+fi
+R500_VAL=`echo "${R500_RAW}" | tr -d 'a-zA-Z, '`
+R500_UNI=`echo "${R500_RAW}" | tr -d '0-9, '`
+printf "## get \`R500': ${R500_VAL} in unit \`${R500_UNI}'\n" | ${TOLOG}
+
+# if in kpc, convert to pix
+case "${R500_UNI}" in
+ [pP]*)
+ printf "## units in \`pixel', conversion not needed\n" | ${TOLOG}
+ R500_PIX_B=`echo ${R500_VAL} | sed 's/[eE]/\*10\^/' | sed 's/+//'`
+ ;;
+ *)
+ printf "## units in \`kpc', convert to \`Chandra pixel'\n" | ${TOLOG}
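+        # the cosmology calculator prints the projected kpc per ACIS pixel
+        # at this redshift; grab that number from its output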
+ KPC_PER_PIX=`${COSCALC} ${REDSHIFT} | grep 'kpc.*pix' | tr -d 'a-zA-Z_#=(),:/ '`
+ # convert scientific notation for `bc'
+ KPC_PER_PIX_B=`echo ${KPC_PER_PIX} | sed 's/[eE]/\*10\^/' | sed 's/+//'`
+ printf "## calculated \`kpc/pixel': ${KPC_PER_PIX_B}\n"
+ R500_VAL_B=`echo ${R500_VAL} | sed 's/[eE]/\*10\^/' | sed 's/+//'`
+ R500_PIX_B=`echo "scale = 4; ${R500_VAL_B} / ( ${KPC_PER_PIX_B} )" | bc -l`
+ ;;
+esac
+# calc (inner-outer R500)
+R_IN=`echo "scale = 4; ${INNER} * ${R500_PIX_B}" | bc -l`
+R_OUT=`echo "scale = 4; ${OUTER} * ${R500_PIX_B}" | bc -l`
+printf "## R500 in units pixel: ${R500_PIX_B}\n" | ${TOLOG}
+printf "## (${INNER}-${OUTER} R500) range in pixel: ${R_IN} - ${R_OUT}\n" | ${TOLOG}
+# r500 }}}
+
+# check evt file
+if [ -r "${evt}" ]; then
+ EVT=${evt}
+elif [ -r "${DFT_EVT}" ]; then
+ EVT=${DFT_EVT}
+else
+ read -p "> clean evt2 file: " EVT
+ if [ ! -r "${EVT}" ]; then
+ printf "ERROR: cannot access given \`${EVT}' evt file\n"
+ exit ${ERR_EVT}
+ fi
+fi
+printf "## use evt file: \`${EVT}'\n" | ${TOLOG}
+
+# input and output region files {{{
+if [ -r "${regin}" ]; then
+ REG_IN="${regin}"
+elif [ -r "${DFT_REG_IN}" ]; then
+ REG_IN=${DFT_REG_IN}
+else
+ read -p "> previous used radial spec regfile: " REG_IN
+ if [ ! -r "${REG_IN}" ]; then
+ printf "ERROR: cannot access given \`${REG_IN}' region file\n"
+ exit ${ERR_REG}
+ fi
+fi
+printf "## use previous regfile: \`${REG_IN}'\n" | ${TOLOG}
+if [ ! -z "${regout}" ]; then
+ REG_OUT="${regout}"
+else
+ REG_OUT="r500avgt_${INNER}-${OUTER}.reg"
+fi
+[ -e "${REG_OUT}" ] && mv -fv ${REG_OUT} ${REG_OUT}_bak
+printf "## set output regfile: \`${REG_OUT}'\n" | ${TOLOG}
+
+# get center position from `regin'
+# only consider `pie' or `annulus'-shaped region
+TMP_REG=`grep -iE '(pie|annulus)' ${REG_IN} | head -n 1`
+XC=`echo ${TMP_REG} | tr -d 'a-zA-Z() ' | awk -F',' '{ print $1 }'`
+YC=`echo ${TMP_REG} | tr -d 'a-zA-Z() ' | awk -F',' '{ print $2 }'`
+printf "## get center coord: (${XC},${YC})\n" | ${TOLOG}
+# region files }}}
+
+# check given bkgd, determine background {{{
+if [ -r "${bkgd}" ]; then
+ BKGD=${bkgd}
+elif [ -r "${DFT_BKGD}" ]; then
+ BKGD=${DFT_BKGD}
+else
+ read -p "> background (blanksky_evt | lbkg_reg | bkg_spec): " BKGD
+ if [ ! -r "${BKGD}" ]; then
+ printf "ERROR: cannot access given \`${BKGD}'\n"
+ exit ${ERR_BKG}
+ fi
+fi
+printf "## use bkgd: \`${BKGD}'\n" | ${TOLOG}
+# determine bkg type: blanksky, lbkg_reg, bkg_spec ?
+# according to file type first: text / FITS
+# if FITS, then get values of `HDUCLAS1' and `OBJECT'
+if file -bL ${BKGD} | grep -qi 'text'; then
+ printf "## given \`${BKGD}' is a \`text file'\n"
+ printf "## use it as local bkg region file\n"
+ printf "## use *LOCAL BKG SPEC*\n" | ${TOLOG}
+ # just set flags, extract spectrum later
+ USE_LBKG_REG=YES
+ USE_BLANKSKY=NO
+ USE_BKG_SPEC=NO
+elif file -bL ${BKGD} | grep -qi 'FITS'; then
+ printf "## given \`${BKGD}' is a \`FITS file'\n"
+ # get FITS header keyword
+ HDUCLAS1=`dmkeypar ${BKGD} HDUCLAS1 echo=yes`
+ if [ "${HDUCLAS1}" = "EVENTS" ]; then
+ # event file
+ printf "## given file is \`event'\n"
+ # check if `blanksky' or `stowed bkg'
+ BKG_OBJ=`dmkeypar ${BKGD} OBJECT echo=yes`
+ if [ "${BKG_OBJ}" = "BACKGROUND DATASET" ] || [ "${BKG_OBJ}" = "ACIS STOWED" ]; then
+ # valid bkg evt file
+ printf "## given FITS file is a valid bkgrnd file\n"
+ printf "## use *BLANKSKY*\n" | ${TOLOG}
+ USE_BLANKSKY=YES
+ USE_LBKG_REG=NO
+ USE_BKG_SPEC=NO
+ # specify `BLANKSKY'
+ BLANKSKY=${BKGD}
+ else
+ # invalid bkg evt file
+ printf "ERROR: invalid bkg evt file given\n"
+ exit ${ERR_BKGTY}
+ fi
+ elif [ "${HDUCLAS1}" = "SPECTRUM" ]; then
+ # spectrum file
+ printf "## given file is \`spectrum'\n"
+ printf "## use *BKG SPECTRUM*\n" | ${TOLOG}
+ USE_BKG_SPEC=YES
+ USE_BLANKSKY=NO
+ USE_LBKG_REG=NO
+ # specify `BKG_SPEC'
+ BKG_SPEC=${BKGD}
+ else
+ # other type
+        printf "ERROR: unsupported FITS file type given\n"
+ exit ${ERR_BKGTY}
+ fi
+else
+ printf "ERROR: given \`${BKGD}' type UNKNOWN\n"
+ exit ${ERR_BKGTY}
+fi
+# bkgd }}}
+
+# check `arf' and `rmf' {{{
+if [ -r "${arf}" ]; then
+ ARF=${arf}
+elif [ -r "${DFT_ARF}" ]; then
+ ARF=${DFT_ARF}
+else
+ read -p "> provide the ARF to use: " ARF
+ if [ ! -r "${ARF}" ]; then
+ printf "ERROR: cannot access given \`${ARF}'\n"
+ exit ${ERR_ARF}
+ fi
+fi
+printf "## use ARF: \`${ARF}'\n" | ${TOLOG}
+# rmf
+if [ -r "${rmf}" ]; then
+ RMF=${rmf}
+elif [ -r "${DFT_RMF}" ]; then
+ RMF=${DFT_RMF}
+else
+ read -p "> provide the RMF to use: " RMF
+ if [ ! -r "${RMF}" ]; then
+ printf "ERROR: cannot access given \`${RMF}'\n"
+ exit ${ERR_RMF}
+ fi
+fi
+printf "## use RMF: \`${RMF}'\n" | ${TOLOG}
+# arf & rmf }}}
+
+# check given `grpcmd'
+if [ ! -z "${grpcmd}" ]; then
+ GRP_CMD="${grpcmd}"
+else
+ GRP_CMD="${DFT_GRP_CMD}"
+fi
+printf "## use grppha cmd: \`${GRP_CMD}'\n" | ${TOLOG}
+## parameters }}}
+
+##################################################
+#### main
+## region related {{{
+## generate the needed region file
+printf "generate the output region file ...\n"
+cat > ${REG_OUT} << _EOF_
+# Region file format: CIAO version 1.0
+pie(${XC},${YC},${R_IN},${R_OUT},0,360)
+_EOF_
+
+## open the evt file to verify or modify
+printf "## check the generated pie region ...\n"
+printf "## if modified, save with the same name \`${REG_OUT}' (overwrite)\n"
+ds9 ${EVT} -region ${REG_OUT}
+
+## check the (modified) region (pie region end angle)
+printf "check the above region (for pie region end angle) ...\n"
+INVALID=`grep -i 'pie' ${REG_OUT} | awk -F'[,()]' '$7 > 360'`
+if [ "x${INVALID}" != "x" ]; then
+    printf "*** WARNING: some pie regions have END_ANGLE > 360\n" | ${TOLOG}
+    printf "*** will fix them ...\n"
+ mv -fv ${REG_OUT} ${REG_OUT}_tmp
+ # using `awk' to fix
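+    # fields are split on ',()' so for pie(xc,yc,r_in,r_out,a1,a2):
+    # $1=shape, $2=xc, $3=yc, $4=r_in, $5=r_out, $6=a1, $7=a2 (end angle)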
+ awk -F'[,()]' '{
+ if ($7 > 360) {
+ printf "%s(%.2f,%.2f,%.2f,%.2f,%.2f,%.2f)\n", $1,$2,$3,$4,$5,$6,($7-360)
+ }
+ else {
+ print $0
+ }
+ }' ${REG_OUT}_tmp > ${REG_OUT}
+ rm -f ${REG_OUT}_tmp
+fi
+## region related }}}
+
+## generate spectrum {{{
+# object
+AVGT_SPEC="${REG_OUT%.reg}.pi"
+AVGT_SPEC_GRP="${AVGT_SPEC%.pi}_grp.pi"
+printf "extract object spectrum \`${AVGT_SPEC}' ...\n"
+punlearn dmextract
+dmextract infile="${EVT}[sky=region(${REG_OUT})][bin PI]" \
+ outfile="${AVGT_SPEC}" wmap="[bin det=8]" clobber=yes
+# group spectrum
+printf "group object spectrum ...\n"
+grppha infile="${AVGT_SPEC}" outfile="${AVGT_SPEC_GRP}" \
+ comm="${GRP_CMD} & exit" clobber=yes > /dev/null
+
+# background
+printf "generate the background spectrum ...\n"
+AVGT_BKG="${AVGT_SPEC%.pi}_bkg.pi"
+if [ "${USE_BLANKSKY}" = "YES" ]; then
+ # use blanksky as background file
+ printf "extract spectrum from blanksky ...\n"
+ punlearn dmextract
+ dmextract infile="${BLANKSKY}[sky=region(${REG_OUT})][bin PI]" \
+ outfile=${AVGT_BKG} wmap="[bin det=8]" clobber=yes
+elif [ "${USE_LBKG_REG}" = "YES" ]; then
+ printf "extract local background ...\n"
+ punlearn dmextract
+ dmextract infile="${EVT}[sky=region(${BKGD})][bin PI]" \
+ outfile=${AVGT_BKG} wmap="[bin det=8]" clobber=yes
+elif [ "${USE_BKG_SPEC}" = "YES" ]; then
+ printf "copy specified background spectrum ...\n"
+ cp -fv ${BKG_SPEC} ${AVGT_BKG}
+fi
+
+printf "renormalize the background ...\n"
+bkg_renorm ${AVGT_SPEC} ${AVGT_BKG}
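+# NOTE: `bkg_renorm' is defined earlier in this script.  As a rough sketch of
+# the idea (an assumption, not the actual implementation), renormalizing a
+# background usually means rescaling its `BACKSCAL' so the background level
+# matches the source spectrum in a source-free hard band, e.g. with a
+# hypothetical rescaled value ${NEW_BACKSCAL}:
+#   punlearn dmhedit
+#   dmhedit infile="${AVGT_BKG}" filelist=none operation=add key=BACKSCAL value=${NEW_BACKSCAL}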
+
+## spectrum }}}
+
+## generate XSPEC script {{{
+printf "generate a XSPEC script ...\n"
+# default output xspec scripts
+XSPEC_SCRIPT="xspec_${REG_OUT%.reg}.xcm"
+[ -e "${XSPEC_SCRIPT}" ] && mv -fv ${XSPEC_SCRIPT} ${XSPEC_SCRIPT}_bak
+cat > ${XSPEC_SCRIPT} << _EOF_
+## XSPEC script
+## spectrum analysis to get the average temperature within (0.1-0.5 R500)
+##
+## generated by: \``basename $0`'
+## date: \``date`'
+##
+
+# xspec settings
+statistic chi
+abund grsa
+query yes
+
+# data
+data ${AVGT_SPEC_GRP}
+response ${RMF}
+arf ${ARF}
+backgrnd ${AVGT_BKG}
+
+# fitting range
+ignore bad
+ignore 0.0-0.7,7.0-**
+
+# plot related
+setplot energy
+
+method leven 10 0.01
+xsect bcmc
+cosmo 70 0 0.73
+xset delta 0.01
+systematic 0
+
+# model
+model wabs*apec
+ ${N_H} -0.001 0 0 100000 1e+06
+ 1.0 0.01 0.008 0.008 64 64
+ 0.4 0.001 0 0 5 5
+ ${REDSHIFT} -0.01 -0.999 -0.999 10 10
+ 0.0 0.01 0 0 1e+24 1e+24
+
+## xspec script end
+_EOF_
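+# Note on the five parameter lines after `model wabs*apec' in the generated
+# script (XSPEC format: value, delta, hard-min, soft-min, soft-max, hard-max;
+# a negative delta freezes the parameter):
+#   line 1: wabs nH [10^22 cm^-2], frozen at ${N_H}
+#   line 2: apec kT [keV], free, initial value 1.0
+#   line 3: apec abundance, free, initial value 0.4
+#   line 4: apec redshift, frozen at ${REDSHIFT}
+#   line 5: apec normalization, free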
+## xspec script }}}
+
+exit 0
+
diff --git a/scripts/ciao_sbp_v3.1.sh b/scripts/ciao_sbp_v3.1.sh
new file mode 100755
index 0000000..d111b7f
--- /dev/null
+++ b/scripts/ciao_sbp_v3.1.sh
@@ -0,0 +1,529 @@
+#!/bin/sh
+#
+unalias -a
+export LC_COLLATE=C
+###########################################################
+## extract `surface brightness profile'                  ##
+## ##
+## NOTES: ##
+## only ACIS-I (chip: 0-3) and ACIS-S (chip: 7) supported##
+## `merge_all' conflicts with HEASoft's `pget' etc. tools##
+## ##
+## LIweitiaNux <liweitianux@gmail.com> ##
+## August 16, 2012 ##
+###########################################################
+
+###########################################################
+## Changes made by zzh (2013-02-01):
+## * remove the regions that fall in the CCD gaps of ACIS-I
+## * remove the regions that cover point sources
+## * require the asol file to prevent coordinate offsets
+###########################################################
+
+SCRIPT_PATH=`readlink -f $0`
+SCRIPT_DIR=`dirname ${SCRIPT_PATH}`
+CCDGAP_SCRIPT="chandra_ccdgap_rect.py"
+
+## about, used in `usage' {{{
+VERSION="v3.1"
+UPDATE="2013-02-01"
+## about }}}
+
+## error code {{{
+ERR_USG=1
+ERR_DIR=11
+ERR_EVT=12
+ERR_BKG=13
+ERR_REG=14
+ERR_CELL_REG=15
+ERR_ASOL=21
+ERR_BPIX=22
+ERR_PBK=23
+ERR_MSK=24
+ERR_BKGTY=31
+ERR_SPEC=32
+ERR_DET=41
+ERR_ENG=42
+ERR_CIAO=100
+## error code }}}
+
+## usage, help {{{
+case "$1" in
+ -[hH]*|--[hH]*)
+ printf "usage:\n"
+ printf " `basename $0` evt_e=<evt_e_file> reg=<sbp_reg> expmap=<exp_map> cellreg=<cell_reg> aspec=<asol_file> [bkg=<bkg> log=<logfile> ]\n"
+ printf "\nversion:\n"
+ printf "${VERSION}, ${UPDATE}\n"
+ exit ${ERR_USG}
+ ;;
+esac
+## usage, help }}}
+
+## default parameters {{{
+# default energy band
+E_RANGE="700:7000"
+# default CCD edge cut width (in pixels)
+DFT_CCD_EDGECUT=25
+# default energy-filtered `event file' used for the extraction
+#DFT_EVT="_NOT_EXIST_"
+DFT_EVT_E="`ls evt2_*_e*.fits 2> /dev/null`"
+# default expmap
+DFT_EXPMAP="`ls expmap*.fits 2> /dev/null | head -n 1`"
+# default `radial region file' to extract surface brightness
+#DFT_SBP_REG="_NOT_EXIST_"
+DFT_SBP_REG="sbprofile.reg"
+# default point source region file
+DFT_CELL_REG="`ls celld*.reg 2> /dev/null`"
+# default asol file
+DFT_ASOL_FILE="`ls pcad*asol*fits 2> /dev/null`"
+
+# default `log file'
+DFT_LOGFILE="sbp_`date '+%Y%m%d'`.log"
+
+## default parameters }}}
+
+## functions {{{
+# process commandline arguments
+# cmdline arg format: `KEY=VALUE'
+getopt_keyval() {
+ until [ -z "$1" ]
+ do
+ key=${1%%=*} # extract key
+ val=${1#*=} # extract value
+ keyval="${key}=\"${val}\""
+ echo "## getopt: eval '${keyval}'"
+ eval ${keyval}
+ shift # shift, process next one
+ done
+}
+## functions }}}
+
+## check CIAO init {{{
+if [ -z "${ASCDS_INSTALL}" ]; then
+ printf "ERROR: CIAO NOT initialized\n"
+ exit ${ERR_CIAO}
+fi
+
+## XXX: heasoft's `pget' etc. tools conflict with some CIAO tools
+printf "set \$PATH to avoid conflicts between HEAsoft and CIAO\n"
+export PATH="${ASCDS_BIN}:${ASCDS_CONTRIB}:${PATH}"
+printf "## PATH: ${PATH}\n"
+## check CIAO }}}
+
+## parameters {{{
+# process cmdline args using `getopt_keyval'
+getopt_keyval "$@"
+
+## check log parameters {{{
+if [ ! -z "${log}" ]; then
+ LOGFILE="${log}"
+else
+ LOGFILE=${DFT_LOGFILE}
+fi
+printf "## use logfile: \`${LOGFILE}'\n"
+[ -e "${LOGFILE}" ] && mv -fv ${LOGFILE} ${LOGFILE}_bak
+TOLOG="tee -a ${LOGFILE}"
+echo "process script: `basename $0`" >> ${LOGFILE}
+echo "process date: `date`" >> ${LOGFILE}
+## log }}}
+
+# check given parameters
+# check evt file
+if [ -r "${evt_e}" ]; then
+ EVT_E=${evt_e}
+elif [ -r "${DFT_EVT_E}" ]; then
+ EVT_E=${DFT_EVT_E}
+else
+ read -p "clean evt2 file: " EVT_E
+ if [ ! -r "${EVT_E}" ]; then
+ printf "ERROR: cannot access given \`${EVT_E}' evt file\n"
+ exit ${ERR_EVT}
+ fi
+fi
+printf "## use evt_eng file: \`${EVT_E}'\n" | ${TOLOG}
+# check expmap file
+if [ -r "${expmap}" ]; then
+ EXPMAP=${expmap}
+elif [ -r "${DFT_EXPMAP}" ]; then
+ EXPMAP=${DFT_EXPMAP}
+else
+    read -p "> exposure map file: " EXPMAP
+ if [ ! -r "${EXPMAP}" ]; then
+ printf "ERROR: cannot access given \`${EXPMAP}' expmap file\n"
+ exit ${ERR_EVT}
+ fi
+fi
+printf "## use expmap file: \`${EXPMAP}'\n" | ${TOLOG}
+# check given region file(s)
+if [ -r "${reg}" ]; then
+ SBP_REG=${reg}
+elif [ -r "${DFT_SBP_REG}" ]; then
+ SBP_REG=${DFT_SBP_REG}
+else
+    read -p "> surface brightness radial region file: " SBP_REG
+ if [ ! -r "${SBP_REG}" ]; then
+ printf "ERROR: cannot access given \`${SBP_REG}' region file\n"
+ exit ${ERR_REG}
+ fi
+fi
+printf "## use reg file(s): \`${SBP_REG}'\n" | ${TOLOG}
+# check bkg
+if [ ! -z "${bkg}" ]; then
+ BKG=${bkg}
+#else
+# read -p "> bkg: " BKG
+fi
+if [ -r "${BKG}" ]; then
+ printf "## use bkg: \`${BKG}'\n" | ${TOLOG}
+else
+ BKG="NULL"
+fi
+# check cell region file
+if [ -r "${cellreg}" ]; then
+ CELL_REG="${cellreg}"
+elif [ -r "${DFT_CELL_REG}" ] ; then
+ CELL_REG="${DFT_CELL_REG}"
+else
+ read -p "> celldetect region file: " CELL_REG
+ if [ ! -r "${CELL_REG}" ]; then
+ printf "ERROR: cannot access given \`${CELL_REG}' region file \n"
+ exit ${ERR_CELL_REG}
+ fi
+fi
+printf "## use cell reg file(s): \`${CELL_REG}'\n" | ${TOLOG}
+# check asol file
+if [ -r "${aspec}" ]; then
+ ASOL_FILE="${aspec}"
+elif [ -r "${DFT_ASOL_FILE}" ] ; then
+ ASOL_FILE="${DFT_ASOL_FILE}"
+else
+    read -p "> asol file: " ASOL_FILE
+    if [ ! -r "${ASOL_FILE}" ] ; then
+        printf "ERROR: cannot access given \`${ASOL_FILE}' asol file\n"
+ exit ${ERR_ASOL}
+ fi
+fi
+printf "## use asol file(s) : \`${ASOL_FILE}'\n" | ${TOLOG}
+## parameters }}}
+
+## determine ACIS type {{{
+# consistent with `ciao_procevt'
+punlearn dmkeypar
+DETNAM=`dmkeypar ${EVT_E} DETNAM echo=yes`
+if echo ${DETNAM} | grep -q 'ACIS-0123'; then
+ printf "## \`DETNAM' (${DETNAM}) has chips 0123 -> ACIS-I\n"
+ ACIS_TYPE="I"
+ CCD="0:3"
+elif echo ${DETNAM} | grep -q 'ACIS-[0-6]*7'; then
+ printf "## \`DETNAM' (${DETNAM}) has chip 7 -> ACIS-S\n"
+ ACIS_TYPE="S"
+ CCD="7"
+else
+ printf "ERROR: unknown detector type: ${DETNAM}\n"
+ exit ${ERR_DET}
+fi
+## ACIS type }}}
+
+## check validity of pie region {{{
+INVALID=`grep -i 'pie' ${SBP_REG} | awk -F'[,()]' '$7 > 360'`
+SBP_REG_FIX="_${SBP_REG%.reg}_fix.reg"
+if [ "x${INVALID}" != "x" ]; then
+ printf "*** WARNING: some pie regions' END_ANGLE > 360\n" | ${TOLOG}
+ printf "*** script will fix ...\n"
+ cp -fv ${SBP_REG} ${SBP_REG_FIX}
+ # using `awk' to fix
+ awk -F'[,()]' '{
+ if ($7 > 360) {
+ printf "%s(%.2f,%.2f,%.2f,%.2f,%.2f,%.2f)\n", $1,$2,$3,$4,$5,$6,($7-360)
+ }
+ else {
+ print $0
+ }
+ }' ${SBP_REG} | sed '/^\#/d' > ${SBP_REG_FIX}
+else
+ cat ${SBP_REG} | sed '/^\#/d' >${SBP_REG_FIX}
+fi
+## pie validity }}}
+
+## main process {{{
+
+## generate `skyfov'
+# XXX: `aspec' was originally omitted here (otherwise the event image size
+# did NOT match the `expmap'); the asol file is now passed to prevent the
+# coordinate offset (see the zzh changes noted above)
+printf "generate skyfov ...\n"
+SKYFOV="_skyfov.fits"
+punlearn skyfov
+skyfov infile="${EVT_E}" outfile="${SKYFOV}" aspec="${ASOL_FILE}" clobber=yes
+
+## get CCD fov regions {{{
+printf "make regions for CCD ...\n"
+TMP_LIST="_tmp_list.txt"
+TMP_REC="_tmp_rec.reg"
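+# How the two branches below build the CCD region(s), chip by chip:
+#   1. dump the `skyfov' POS polygon of the chip and feed it to
+#      ${CCDGAP_SCRIPT}, which is assumed to return one rotated-box-like
+#      region `shape(xc,yc,width,height,angle)';
+#   2. from the center (XC,YC), half width (ADD_L) and angle (ANG), compute
+#      the sky coordinates of the four chip corners;
+#   3. move each corner inward by ${DFT_CCD_EDGECUT} pixels along both of its
+#      adjacent edges, cutting away the chip edges (and hence the CCD gaps);
+#   4. write the resulting polygon(s) to the `_ccd.reg' file.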
+if [ "${ACIS_TYPE}" = "S" ]; then
+ # ACIS-S
+ punlearn dmlist
+ dmlist infile="${SKYFOV}[ccd_id=${CCD}][cols POS]" opt="data,clean" | awk '{for (i=1;i<=NF;i++) print $i }' |sed -e ':a;N;s/\n/,/;ta' | awk -F"]," '{print "polygon("$2}' | awk -F"NaN" '{print $1}' >${TMP_LIST}
+ python ${SCRIPT_DIR}/${CCDGAP_SCRIPT} ${TMP_LIST} >${TMP_REC}
+ XC=` cat ${TMP_REC} | awk -F\( '{print $2}' |awk -F\) '{print $1}' |awk -F\, '{print $1}'`
+ YC=` cat ${TMP_REC} | awk -F\( '{print $2}' |awk -F\) '{print $1}' |awk -F\, '{print $2}'`
+ ADD_L=` cat ${TMP_REC} | awk -F\( '{print $2}' |awk -F\) '{print $1}' |awk -F\, '{print $3/2}'`
+ ANG=` cat ${TMP_REC} | awk -F\( '{print $2}' |awk -F\) '{print $1}' |awk -F\, '{print $5}'`
+ while [ 1 -eq 1 ]; do
+ if [ `echo "${ANG} < 0" |bc -l ` -eq 1 ] ; then
+ ANG=` echo " ${ANG} + 90 " | bc -l `
+ elif [ `echo "${ANG} >=90" |bc -l ` -eq 1 ] ; then
+ ANG=` echo " ${ANG} - 90 " | bc -l`
+ else
+ break
+ fi
+ done
+ ANG=`echo "${ANG}/180*3.1415926" |bc -l`
+ CCD_1_X_RAW=` echo " ${XC} ${ADD_L} ${ANG} "| awk '{print $1-$2*cos($3)-$2*sin($3)}' `
+ CCD_2_X_RAW=` echo " ${XC} ${ADD_L} ${ANG} "| awk '{print $1+$2*cos($3)-$2*sin($3)}' `
+ CCD_3_X_RAW=` echo " ${XC} ${ADD_L} ${ANG} "| awk '{print $1-$2*cos($3)+$2*sin($3)}' `
+ CCD_4_X_RAW=` echo " ${XC} ${ADD_L} ${ANG} "| awk '{print $1+$2*cos($3)+$2*sin($3)}' `
+ CCD_1_Y_RAW=` echo " ${YC} ${ADD_L} ${ANG} "| awk '{print $1+$2*cos($3)-$2*sin($3)}' `
+ CCD_2_Y_RAW=` echo " ${YC} ${ADD_L} ${ANG} "| awk '{print $1+$2*cos($3)+$2*sin($3)}' `
+ CCD_3_Y_RAW=` echo " ${YC} ${ADD_L} ${ANG} "| awk '{print $1-$2*cos($3)-$2*sin($3)}' `
+ CCD_4_Y_RAW=` echo " ${YC} ${ADD_L} ${ANG} "| awk '{print $1-$2*cos($3)+$2*sin($3)}' `
+ CCD_1_RAW=` echo "${CCD_1_X_RAW},${CCD_1_Y_RAW}"`
+ CCD_2_RAW=` echo "${CCD_2_X_RAW},${CCD_2_Y_RAW}"`
+ CCD_3_RAW=` echo "${CCD_3_X_RAW},${CCD_3_Y_RAW}"`
+ CCD_4_RAW=` echo "${CCD_4_X_RAW},${CCD_4_Y_RAW}"`
+ REG_CCD_RAW="`echo "polygon(${CCD_1_RAW}, ${CCD_2_RAW}, ${CCD_4_RAW}, ${CCD_3_RAW}) " `"
+ DX_2T1=$(echo "`echo ${CCD_2_RAW} | awk -F\, '{print $1}'`-`echo ${CCD_1_RAW} |awk -F\, '{print $1}'`" |bc -l)
+ DY_2T1=$(echo "`echo ${CCD_2_RAW} | awk -F\, '{print $2}'`-`echo ${CCD_1_RAW} |awk -F\, '{print $2}'`" |bc -l)
+ CCD_WIDTH=`echo "sqrt(${DX_2T1}*${DX_2T1}+${DY_2T1}*${DY_2T1})" | bc -l`
+ CCD_2T1_MOV_X=`echo "${DFT_CCD_EDGECUT}*${DX_2T1}/${CCD_WIDTH}" | bc -l`
+ CCD_2T1_MOV_Y=`echo "${DFT_CCD_EDGECUT}*${DY_2T1}/${CCD_WIDTH}" | bc -l`
+ DX_3T1=$(echo "`echo ${CCD_3_RAW} | awk -F\, '{print $1}'`-`echo ${CCD_1_RAW} |awk -F\, '{print $1}'`" |bc -l)
+ DY_3T1=$(echo "`echo ${CCD_3_RAW} | awk -F\, '{print $2}'`-`echo ${CCD_1_RAW} |awk -F\, '{print $2}'`" |bc -l)
+ CCD_3T1_MOV_X=`echo "${DFT_CCD_EDGECUT}*${DX_3T1}/${CCD_WIDTH}" | bc -l`
+ CCD_3T1_MOV_Y=`echo "${DFT_CCD_EDGECUT}*${DY_3T1}/${CCD_WIDTH}" | bc -l`
+ CCD_1_X=$(echo "` echo ${CCD_1_RAW} |awk -F\, '{print $1}' ` + `echo ${CCD_2T1_MOV_X}` +`echo ${CCD_3T1_MOV_X}` "| bc -l)
+ CCD_1_Y=$(echo "` echo ${CCD_1_RAW} |awk -F\, '{print $2}' ` + `echo ${CCD_2T1_MOV_Y}` +`echo ${CCD_3T1_MOV_Y}` "| bc -l)
+ DX_1T2=$(echo "`echo ${CCD_1_RAW} | awk -F\, '{print $1}'`-`echo ${CCD_2_RAW} |awk -F\, '{print $1}'`" |bc -l)
+ DY_1T2=$(echo "`echo ${CCD_1_RAW} | awk -F\, '{print $2}'`-`echo ${CCD_2_RAW} |awk -F\, '{print $2}'`" |bc -l)
+ CCD_1T2_MOV_X=`echo "${DFT_CCD_EDGECUT}*${DX_1T2}/${CCD_WIDTH}" | bc -l`
+ CCD_1T2_MOV_Y=`echo "${DFT_CCD_EDGECUT}*${DY_1T2}/${CCD_WIDTH}" | bc -l`
+ DX_4T2=$(echo "`echo ${CCD_4_RAW} | awk -F\, '{print $1}'`-`echo ${CCD_2_RAW} |awk -F\, '{print $1}'`" |bc -l)
+ DY_4T2=$(echo "`echo ${CCD_4_RAW} | awk -F\, '{print $2}'`-`echo ${CCD_2_RAW} |awk -F\, '{print $2}'`" |bc -l)
+ CCD_4T2_MOV_X=`echo "${DFT_CCD_EDGECUT}*${DX_4T2}/${CCD_WIDTH}" | bc -l`
+ CCD_4T2_MOV_Y=`echo "${DFT_CCD_EDGECUT}*${DY_4T2}/${CCD_WIDTH}" | bc -l`
+ CCD_2_X=$(echo "` echo ${CCD_2_RAW} |awk -F\, '{print $1}' ` + `echo ${CCD_1T2_MOV_X}` +`echo ${CCD_4T2_MOV_X}` "| bc -l)
+ CCD_2_Y=$(echo "` echo ${CCD_2_RAW} |awk -F\, '{print $2}' ` + `echo ${CCD_1T2_MOV_Y}` +`echo ${CCD_4T2_MOV_Y}` "| bc -l)
+ DX_1T3=$(echo "`echo ${CCD_1_RAW} | awk -F\, '{print $1}'`-`echo ${CCD_3_RAW} |awk -F\, '{print $1}'`" |bc -l)
+ DY_1T3=$(echo "`echo ${CCD_1_RAW} | awk -F\, '{print $2}'`-`echo ${CCD_3_RAW} |awk -F\, '{print $2}'`" |bc -l)
+ CCD_1T3_MOV_X=`echo "${DFT_CCD_EDGECUT}*${DX_1T3}/${CCD_WIDTH}" | bc -l`
+ CCD_1T3_MOV_Y=`echo "${DFT_CCD_EDGECUT}*${DY_1T3}/${CCD_WIDTH}" | bc -l`
+ DX_4T3=$(echo "`echo ${CCD_4_RAW} | awk -F\, '{print $1}'`-`echo ${CCD_3_RAW} |awk -F\, '{print $1}'`" |bc -l)
+ DY_4T3=$(echo "`echo ${CCD_4_RAW} | awk -F\, '{print $2}'`-`echo ${CCD_3_RAW} |awk -F\, '{print $2}'`" |bc -l)
+ CCD_4T3_MOV_X=`echo "${DFT_CCD_EDGECUT}*${DX_4T3}/${CCD_WIDTH}" | bc -l`
+ CCD_4T3_MOV_Y=`echo "${DFT_CCD_EDGECUT}*${DY_4T3}/${CCD_WIDTH}" | bc -l`
+ CCD_3_X=$(echo "` echo ${CCD_3_RAW} |awk -F\, '{print $1}' ` + `echo ${CCD_1T3_MOV_X}` +`echo ${CCD_4T3_MOV_X}` "| bc -l)
+ CCD_3_Y=$(echo "` echo ${CCD_3_RAW} |awk -F\, '{print $2}' ` + `echo ${CCD_1T3_MOV_Y}` +`echo ${CCD_4T3_MOV_Y}` "| bc -l)
+ DX_2T4=$(echo "`echo ${CCD_2_RAW} | awk -F\, '{print $1}'`-`echo ${CCD_4_RAW} |awk -F\, '{print $1}'`" |bc -l)
+ DY_2T4=$(echo "`echo ${CCD_2_RAW} | awk -F\, '{print $2}'`-`echo ${CCD_4_RAW} |awk -F\, '{print $2}'`" |bc -l)
+ CCD_2T4_MOV_X=`echo "${DFT_CCD_EDGECUT}*${DX_2T4}/${CCD_WIDTH}" | bc -l`
+ CCD_2T4_MOV_Y=`echo "${DFT_CCD_EDGECUT}*${DY_2T4}/${CCD_WIDTH}" | bc -l`
+ DX_3T4=$(echo "`echo ${CCD_3_RAW} | awk -F\, '{print $1}'`-`echo ${CCD_4_RAW} |awk -F\, '{print $1}'`" |bc -l)
+ DY_3T4=$(echo "`echo ${CCD_3_RAW} | awk -F\, '{print $2}'`-`echo ${CCD_4_RAW} |awk -F\, '{print $2}'`" |bc -l)
+ CCD_3T4_MOV_X=`echo "${DFT_CCD_EDGECUT}*${DX_3T4}/${CCD_WIDTH}" | bc -l`
+ CCD_3T4_MOV_Y=`echo "${DFT_CCD_EDGECUT}*${DY_3T4}/${CCD_WIDTH}" | bc -l`
+ CCD_4_X=$(echo "` echo ${CCD_4_RAW} |awk -F\, '{print $1}' ` + `echo ${CCD_2T4_MOV_X}` +`echo ${CCD_3T4_MOV_X}` "| bc -l)
+ CCD_4_Y=$(echo "` echo ${CCD_4_RAW} |awk -F\, '{print $2}' ` + `echo ${CCD_2T4_MOV_Y}` +`echo ${CCD_3T4_MOV_Y}` "| bc -l)
+ REG_CCD_CUT=`echo "polygon(${CCD_1_X},${CCD_1_Y},${CCD_2_X},${CCD_2_Y},${CCD_4_X},${CCD_4_Y},${CCD_3_X},${CCD_3_Y})"`
+ REG_FILE_CCD="_ccd.reg"
+ [ -e "${REG_FILE_CCD}" ] && mv -f ${REG_FILE_CCD} ${REG_FILE_CCD}_bak
+ echo "${REG_CCD_CUT}" >>${REG_FILE_CCD}
+
+elif [ "${ACIS_TYPE}" = "I" ]; then
+ # ACIS-I
+ TMP_REG_FILE_CCD="_ccd_tmp.reg"
+ [ -e "${TMP_REG_FILE_CCD}" ] && mv -f ${TMP_REG_FILE_CCD} ${TMP_REG_FILE_CCD}_bak
+ for i in `seq 0 3` ; do
+ punlearn dmlist
+ dmlist infile="${SKYFOV}[ccd_id=${i}][cols POS]" opt="data,clean" | awk '{for (i=1;i<=NF;i++) print $i }' |sed -e ':a;N;s/\n/,/;ta' | awk -F"]," '{print "polygon("$2}' | awk -F"NaN" '{print $1}' >${TMP_LIST}
+ python ${SCRIPT_DIR}/${CCDGAP_SCRIPT} ${TMP_LIST} >${TMP_REC}
+ XC=` cat ${TMP_REC} | awk -F\( '{print $2}' |awk -F\) '{print $1}' |awk -F\, '{print $1}'`
+ YC=` cat ${TMP_REC} | awk -F\( '{print $2}' |awk -F\) '{print $1}' |awk -F\, '{print $2}'`
+ ADD_L=` cat ${TMP_REC} | awk -F\( '{print $2}' |awk -F\) '{print $1}' |awk -F\, '{print $3/2}'`
+ ANG=` cat ${TMP_REC} | awk -F\( '{print $2}' |awk -F\) '{print $1}' |awk -F\, '{print $5}'`
+ while [ 1 -eq 1 ]; do
+ if [ `echo "${ANG} < 0" |bc -l ` -eq 1 ] ; then
+ ANG=` echo " ${ANG} + 90 " | bc -l `
+ elif [ `echo "${ANG} >=90" |bc -l ` -eq 1 ] ; then
+ ANG=` echo " ${ANG} - 90 " | bc -l`
+ else
+ break
+ fi
+ done
+ ANG=`echo "${ANG}/180*3.1415926" |bc -l`
+ CCD_1_X_RAW=` echo " ${XC} ${ADD_L} ${ANG} "| awk '{print $1-$2*cos($3)-$2*sin($3)}' `
+ CCD_2_X_RAW=` echo " ${XC} ${ADD_L} ${ANG} "| awk '{print $1+$2*cos($3)-$2*sin($3)}' `
+ CCD_3_X_RAW=` echo " ${XC} ${ADD_L} ${ANG} "| awk '{print $1-$2*cos($3)+$2*sin($3)}' `
+ CCD_4_X_RAW=` echo " ${XC} ${ADD_L} ${ANG} "| awk '{print $1+$2*cos($3)+$2*sin($3)}' `
+ CCD_1_Y_RAW=` echo " ${YC} ${ADD_L} ${ANG} "| awk '{print $1+$2*cos($3)-$2*sin($3)}' `
+ CCD_2_Y_RAW=` echo " ${YC} ${ADD_L} ${ANG} "| awk '{print $1+$2*cos($3)+$2*sin($3)}' `
+ CCD_3_Y_RAW=` echo " ${YC} ${ADD_L} ${ANG} "| awk '{print $1-$2*cos($3)-$2*sin($3)}' `
+ CCD_4_Y_RAW=` echo " ${YC} ${ADD_L} ${ANG} "| awk '{print $1-$2*cos($3)+$2*sin($3)}' `
+ CCD_1_RAW=` echo "${CCD_1_X_RAW},${CCD_1_Y_RAW}"`
+ CCD_2_RAW=` echo "${CCD_2_X_RAW},${CCD_2_Y_RAW}"`
+ CCD_3_RAW=` echo "${CCD_3_X_RAW},${CCD_3_Y_RAW}"`
+ CCD_4_RAW=` echo "${CCD_4_X_RAW},${CCD_4_Y_RAW}"`
+ REG_CCD_RAW=`echo "polygon(${CCD_1_RAW}, ${CCD_2_RAW}, ${CCD_4_RAW}, ${CCD_3_RAW}) " `
+ DX_2T1=$(echo "`echo ${CCD_2_RAW} | awk -F\, '{print $1}'`-`echo ${CCD_1_RAW} |awk -F\, '{print $1}'`" |bc -l)
+ DY_2T1=$(echo "`echo ${CCD_2_RAW} | awk -F\, '{print $2}'`-`echo ${CCD_1_RAW} |awk -F\, '{print $2}'`" |bc -l)
+ CCD_WIDTH=`echo "sqrt(${DX_2T1}*${DX_2T1}+${DY_2T1}*${DY_2T1})" | bc -l`
+ CCD_2T1_MOV_X=`echo "${DFT_CCD_EDGECUT}*${DX_2T1}/${CCD_WIDTH}" | bc -l`
+ CCD_2T1_MOV_Y=`echo "${DFT_CCD_EDGECUT}*${DY_2T1}/${CCD_WIDTH}" | bc -l`
+ DX_3T1=$(echo "`echo ${CCD_3_RAW} | awk -F\, '{print $1}'`-`echo ${CCD_1_RAW} |awk -F\, '{print $1}'`" |bc -l)
+ DY_3T1=$(echo "`echo ${CCD_3_RAW} | awk -F\, '{print $2}'`-`echo ${CCD_1_RAW} |awk -F\, '{print $2}'`" |bc -l)
+ CCD_3T1_MOV_X=`echo "${DFT_CCD_EDGECUT}*${DX_3T1}/${CCD_WIDTH}" | bc -l`
+ CCD_3T1_MOV_Y=`echo "${DFT_CCD_EDGECUT}*${DY_3T1}/${CCD_WIDTH}" | bc -l`
+ CCD_1_X=$(echo "` echo ${CCD_1_RAW} |awk -F\, '{print $1}' ` + `echo ${CCD_2T1_MOV_X}` +`echo ${CCD_3T1_MOV_X}` "| bc -l)
+ CCD_1_Y=$(echo "` echo ${CCD_1_RAW} |awk -F\, '{print $2}' ` + `echo ${CCD_2T1_MOV_Y}` +`echo ${CCD_3T1_MOV_Y}` "| bc -l)
+ DX_1T2=$(echo "`echo ${CCD_1_RAW} | awk -F\, '{print $1}'`-`echo ${CCD_2_RAW} |awk -F\, '{print $1}'`" |bc -l)
+ DY_1T2=$(echo "`echo ${CCD_1_RAW} | awk -F\, '{print $2}'`-`echo ${CCD_2_RAW} |awk -F\, '{print $2}'`" |bc -l)
+ CCD_1T2_MOV_X=`echo "${DFT_CCD_EDGECUT}*${DX_1T2}/${CCD_WIDTH}" | bc -l`
+ CCD_1T2_MOV_Y=`echo "${DFT_CCD_EDGECUT}*${DY_1T2}/${CCD_WIDTH}" | bc -l`
+ DX_4T2=$(echo "`echo ${CCD_4_RAW} | awk -F\, '{print $1}'`-`echo ${CCD_2_RAW} |awk -F\, '{print $1}'`" |bc -l)
+ DY_4T2=$(echo "`echo ${CCD_4_RAW} | awk -F\, '{print $2}'`-`echo ${CCD_2_RAW} |awk -F\, '{print $2}'`" |bc -l)
+ CCD_4T2_MOV_X=`echo "${DFT_CCD_EDGECUT}*${DX_4T2}/${CCD_WIDTH}" | bc -l`
+ CCD_4T2_MOV_Y=`echo "${DFT_CCD_EDGECUT}*${DY_4T2}/${CCD_WIDTH}" | bc -l`
+ CCD_2_X=$(echo "` echo ${CCD_2_RAW} |awk -F\, '{print $1}' ` + `echo ${CCD_1T2_MOV_X}` +`echo ${CCD_4T2_MOV_X}` "| bc -l)
+ CCD_2_Y=$(echo "` echo ${CCD_2_RAW} |awk -F\, '{print $2}' ` + `echo ${CCD_1T2_MOV_Y}` +`echo ${CCD_4T2_MOV_Y}` "| bc -l)
+ DX_1T3=$(echo "`echo ${CCD_1_RAW} | awk -F\, '{print $1}'`-`echo ${CCD_3_RAW} |awk -F\, '{print $1}'`" |bc -l)
+ DY_1T3=$(echo "`echo ${CCD_1_RAW} | awk -F\, '{print $2}'`-`echo ${CCD_3_RAW} |awk -F\, '{print $2}'`" |bc -l)
+ CCD_1T3_MOV_X=`echo "${DFT_CCD_EDGECUT}*${DX_1T3}/${CCD_WIDTH}" | bc -l`
+ CCD_1T3_MOV_Y=`echo "${DFT_CCD_EDGECUT}*${DY_1T3}/${CCD_WIDTH}" | bc -l`
+ DX_4T3=$(echo "`echo ${CCD_4_RAW} | awk -F\, '{print $1}'`-`echo ${CCD_3_RAW} |awk -F\, '{print $1}'`" |bc -l)
+ DY_4T3=$(echo "`echo ${CCD_4_RAW} | awk -F\, '{print $2}'`-`echo ${CCD_3_RAW} |awk -F\, '{print $2}'`" |bc -l)
+ CCD_4T3_MOV_X=`echo "${DFT_CCD_EDGECUT}*${DX_4T3}/${CCD_WIDTH}" | bc -l`
+ CCD_4T3_MOV_Y=`echo "${DFT_CCD_EDGECUT}*${DY_4T3}/${CCD_WIDTH}" | bc -l`
+ CCD_3_X=$(echo "` echo ${CCD_3_RAW} |awk -F\, '{print $1}' ` + `echo ${CCD_1T3_MOV_X}` +`echo ${CCD_4T3_MOV_X}` "| bc -l)
+ CCD_3_Y=$(echo "` echo ${CCD_3_RAW} |awk -F\, '{print $2}' ` + `echo ${CCD_1T3_MOV_Y}` +`echo ${CCD_4T3_MOV_Y}` "| bc -l)
+ DX_2T4=$(echo "`echo ${CCD_2_RAW} | awk -F\, '{print $1}'`-`echo ${CCD_4_RAW} |awk -F\, '{print $1}'`" |bc -l)
+ DY_2T4=$(echo "`echo ${CCD_2_RAW} | awk -F\, '{print $2}'`-`echo ${CCD_4_RAW} |awk -F\, '{print $2}'`" |bc -l)
+ CCD_2T4_MOV_X=`echo "${DFT_CCD_EDGECUT}*${DX_2T4}/${CCD_WIDTH}" | bc -l`
+ CCD_2T4_MOV_Y=`echo "${DFT_CCD_EDGECUT}*${DY_2T4}/${CCD_WIDTH}" | bc -l`
+ DX_3T4=$(echo "`echo ${CCD_3_RAW} | awk -F\, '{print $1}'`-`echo ${CCD_4_RAW} |awk -F\, '{print $1}'`" |bc -l)
+ DY_3T4=$(echo "`echo ${CCD_3_RAW} | awk -F\, '{print $2}'`-`echo ${CCD_4_RAW} |awk -F\, '{print $2}'`" |bc -l)
+ CCD_3T4_MOV_X=`echo "${DFT_CCD_EDGECUT}*${DX_3T4}/${CCD_WIDTH}" | bc -l`
+ CCD_3T4_MOV_Y=`echo "${DFT_CCD_EDGECUT}*${DY_3T4}/${CCD_WIDTH}" | bc -l`
+ CCD_4_X=$(echo "` echo ${CCD_4_RAW} |awk -F\, '{print $1}' ` + `echo ${CCD_2T4_MOV_X}` +`echo ${CCD_3T4_MOV_X}` "| bc -l)
+ CCD_4_Y=$(echo "` echo ${CCD_4_RAW} |awk -F\, '{print $2}' ` + `echo ${CCD_2T4_MOV_Y}` +`echo ${CCD_3T4_MOV_Y}` "| bc -l)
+ REG_CCD_CUT=`echo "polygon(${CCD_1_X},${CCD_1_Y},${CCD_2_X},${CCD_2_Y},${CCD_4_X},${CCD_4_Y},${CCD_3_X},${CCD_3_Y})"`
+ echo ${REG_CCD_CUT} >>${TMP_REG_FILE_CCD}
+done
+ REG_FILE_CCD="_ccd.reg"
+ [ -e "${REG_FILE_CCD}" ] && mv -fv ${REG_FILE_CCD} ${REG_FILE_CCD}_bak
+ # echo "` cat ${TMP_REG_FILE_CCD} | head -n 1 | tail -n 1` + ` cat ${TMP_REG_FILE_CCD} | head -n 2 | tail -n 1` +`cat ${TMP_REG_FILE_CCD} | head -n 3 | tail -n 1`+`cat ${TMP_REG_FILE_CCD} | head -n 4 | tail -n 1 `" >${REG_FILE_CCD}
+ cat "${TMP_REG_FILE_CCD}">${REG_FILE_CCD}
+else
+ #
+    printf "*** ERROR: unknown ACIS_TYPE ***\n"
+ exit 255
+fi
+## }}}
+
+## save the raw (un-cut) CCD region for reference
+echo "${REG_CCD_RAW}" >_ccd_raw.reg
+
+# exit 233
+
+## generate new regions within CCD for dmextract
+SBP_REG_INCCD="_${SBP_REG%.reg}_inccd.reg"
+[ -e "${SBP_REG_INCCD}" ] && mv -fv ${SBP_REG_INCCD} ${SBP_REG_INCCD}_bak
+
+echo "CMD: cat ${CELL_REG} | grep \( | sed -e ':a;N;s/\n/-/;ta'"
+CELL_REG_USE=`cat ${CELL_REG} | grep \( | sed -e ':a;N;s/\n/-/;ta'`
+# exit 233
+
+if [ "${ACIS_TYPE}" = "S" ]; then
+ grep -iE '^(pie|annulus)' ${SBP_REG_FIX} | sed "s/$/\ \&\ `cat ${REG_FILE_CCD}`/" | sed "s/$/\ \-\ ${CELL_REG_USE}/" > ${SBP_REG_INCCD}
+ else
+ L=`cat ${SBP_REG_FIX} | wc -l `
+
+ for i in `seq 1 $L` ; do
+ echo "`cat ${SBP_REG_FIX} |head -n $i | tail -n 1 ` & `cat ${REG_FILE_CCD} | head -n 1 `- ${CELL_REG_USE} | `cat ${SBP_REG_FIX} |head -n $i | tail -n 1` & `cat ${REG_FILE_CCD} | head -n 2| tail -n 1 `- ${CELL_REG_USE} |`cat ${SBP_REG_FIX} |head -n $i | tail -n 1 ` & `cat ${REG_FILE_CCD} | head -n 3 | tail -n 1 `- ${CELL_REG_USE} |`cat ${SBP_REG_FIX} |head -n $i | tail -n 1 ` & `cat ${REG_FILE_CCD} | tail -n 1 `- ${CELL_REG_USE} " >>${SBP_REG_INCCD}
+ done
+fi
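+# Each line of ${SBP_REG_INCCD} is therefore a composed CIAO region of the
+# form (illustration): `pie(...) & polygon(ccd) - pntsrc1-pntsrc2-...', i.e.
+# the SBP pie/annulus intersected with a CCD polygon and with the detected
+# point sources excluded; for ACIS-I the four per-chip pieces are further
+# joined with `|' (union) on the same line.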
+# ds9 ${EVT_E} -region ${SBP_REG_INCCD}
+
+## `surface brightness profile' related data {{{
+## extract sbp
+printf "extract surface brightness profile ...\n"
+SBP_DAT="${SBP_REG%.reg}.fits"
+[ -e "${SBP_DAT}" ] && mv -fv ${SBP_DAT} ${SBP_DAT}_bak
+if [ -r "${BKG}" ]; then
+ EXPO_EVT=`dmkeypar ${EVT_E} EXPOSURE echo=yes`
+ EXPO_BKG=`dmkeypar ${BKG} EXPOSURE echo=yes`
+ BKG_NORM=`echo "${EXPO_EVT} ${EXPO_BKG}" | awk '{ printf("%g", $1/$2) }'`
+ printf " == (BKG subtracted; bkgnorm=${BKG_NORM}, energy:${E_RANGE}) ==\n"
+ punlearn dmextract
+ dmextract infile="${EVT_E}[bin sky=@${SBP_REG_INCCD}]" outfile="${SBP_DAT}" \
+ exp="${EXPMAP}" bkg="${BKG}[energy=${E_RANGE}][bin sky=@${SBP_REG_INCCD}]" \
+ bkgexp=")exp" bkgnorm=${BKG_NORM} opt=generic clobber=yes
+else
+ punlearn dmextract
+ dmextract infile="${EVT_E}[bin sky=@${SBP_REG_INCCD}]" outfile="${SBP_DAT}" \
+ exp="${EXPMAP}" opt=generic clobber=yes
+fi
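+# note: `bkgexp=")exp"' redirects to the value of the `exp' parameter
+# (CIAO-style parameter redirection), i.e. the same exposure map is applied
+# to the background, while `bkgnorm' scales the background by the exposure
+# ratio computed above.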
+
+## add `rmid' column
+printf "add \`RMID' & \`R_ERR' column ...\n"
+SBP_RMID="${SBP_DAT%.fits}_rmid.fits"
+[ -e "${SBP_RMID}" ] && mv -fv ${SBP_RMID} ${SBP_RMID}_bak
+punlearn dmtcalc
+dmtcalc infile="${SBP_DAT}" outfile="${SBP_RMID}" \
+ expression="RMID=(R[0]+R[1])/2,R_ERR=(R[1]-R[0])/2" \
+ clobber=yes
+
+## output needed sbp data to files
+printf "output needed sbp data ...\n"
+SBP_TXT="${SBP_DAT%.fits}.txt"
+SBP_QDP="${SBP_DAT%.fits}.qdp"
+[ -e "${SBP_TXT}" ] && mv -fv ${SBP_TXT} ${SBP_TXT}_bak
+[ -e "${SBP_QDP}" ] && mv -fv ${SBP_QDP} ${SBP_QDP}_bak
+punlearn dmlist
+dmlist infile="${SBP_RMID}[cols RMID,R_ERR,SUR_FLUX,SUR_FLUX_ERR]" \
+ outfile="${SBP_TXT}" opt="data,clean"
+
+## QDP for sbp {{{
+printf "generate a handy QDP file for sbp ...\n"
+cp -fv ${SBP_TXT} ${SBP_QDP}
+# change comment sign
+sed -i'' 's/#/!/g' ${SBP_QDP}
+# add QDP commands
+sed -i'' '1 i\
+READ SERR 1 2' ${SBP_QDP}
+sed -i'' '2 i\
+LABEL Y "Surface Flux (photons/cm\\u2\\d/pixel\\u2\\d/s)"' ${SBP_QDP}
+sed -i'' '2 i\
+LABEL X "Radius (pixel)"' ${SBP_QDP}
+sed -i'' '2 i\
+LABEL T "Surface Brightness Profile"' ${SBP_QDP}
+## QDP }}}
+
+printf "generate sbp fitting needed files ...\n"
+SBP_RADIUS="radius_sbp.txt"
+SBP_FLUX="flux_sbp.txt"
+[ -e "${SBP_RADIUS}" ] && mv -fv ${SBP_RADIUS} ${SBP_RADIUS}_bak
+[ -e "${SBP_FLUX}" ] && mv -fv ${SBP_FLUX} ${SBP_FLUX}_bak
+punlearn dmlist
+dmlist infile="${SBP_RMID}[cols R]" \
+ opt="data,clean" | awk '{ print $2 }' > ${SBP_RADIUS}
+# change the first line `R[2]' to `0.0'
+sed -i'' 's/R.*/0\.0/' ${SBP_RADIUS}
+dmlist infile="${SBP_RMID}[cols SUR_FLUX,SUR_FLUX_ERR]" \
+ opt="data,clean" > ${SBP_FLUX}
+# remove the first comment line
+sed -i'' '/#.*/d' ${SBP_FLUX}
+
+## sbp data }}}
+
+## main }}}
+
+exit 0
+
diff --git a/scripts/clean_massdir.sh b/scripts/clean_massdir.sh
new file mode 100755
index 0000000..dc9c4d7
--- /dev/null
+++ b/scripts/clean_massdir.sh
@@ -0,0 +1,219 @@
+#!/bin/sh
+#
+# clean files in `mass' dir
+#
+# v2, 2013/05/04, LIweitiaNux
+# also handles an `empty mass dir'
+#
+
+IMG_DIR=${1:-../img}
+SPC_DIR=${2:-../spc/profile}
+
+if [ ! -f fitting_mass.conf ] || [ ! -f fitting_dbeta_mass.conf ]; then
+ # empty mass dir
+ printf "## EMPTY mass dir ...\n"
+ rm -rf *.*
+
+ # expcorr_conf
+ EXPCORR_CONF=`ls ${IMG_DIR}/*_expcorr.conf 2> /dev/null`
+ if [ ! -z "${EXPCORR_CONF}" ]; then
+ Z=`grep '^z' ${EXPCORR_CONF} | awk '{ print $2 }'`
+ N_H=`grep '^nh' ${EXPCORR_CONF} | awk '{ print $2 }'`
+ ABUND=`grep '^abund' ${EXPCORR_CONF} | awk '{ print $2 }'`
+ printf "# redshift: ${Z}\n"
+ printf "# nh: ${N_H}\n"
+ printf "# abund: ${ABUND}\n"
+ fi
+
+ # cosmo_calc
+ COSMO_CALC=`which cosmo_calc 2> /dev/null`
+ if [ ! -z "${COSMO_CALC}" ]; then
+ CM_PER_PIXEL=`${COSMO_CALC} ${Z} | grep 'cm/pixel' | awk -F':' '{ print $2 }' | tr -d ' '`
+ printf "# cm_per_pixel: ${CM_PER_PIXEL}\n"
+ fi
+
+ cat > wang2012_param.txt << _EOF_
+A 5.0 1.0 500 T
+n 5.0 0.1 10 T
+xi 0.3 0.1 1.0 T
+a2 2000 1000 1e+05 T
+a3 1000 600 3000 T
+beta 0.5 0.1 0.5 T
+T0 1.0 1.0 2.0 T
+_EOF_
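+    # The columns of `wang2012_param.txt' above are assumed to be:
+    #   <param name> <initial value> <lower bound> <upper bound> <fit flag (T/F)>
+    # (an assumption from the layout; the `mass_profile' fitting code defines
+    # the exact format)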
+
+ cat > fitting_mass.conf << _EOF_
+t_profile wang2012
+t_data_file tcl_temp_profile.txt
+t_param_file wang2012_param.txt
+sbp_cfg fitting_sbp.conf
+nh ${N_H}
+abund ${ABUND}
+radius_sbp_file sbprofile.txt
+# nfw_rmin_kpc 1
+_EOF_
+
+ cat > fitting_dbeta_mass.conf << _EOF_
+t_profile wang2012
+t_data_file tcl_temp_profile.txt
+t_param_file wang2012_param.txt
+sbp_cfg fitting_dbeta_sbp.conf
+nh ${N_H}
+abund ${ABUND}
+radius_sbp_file sbprofile.txt
+# nfw_rmin_kpc 1
+_EOF_
+
+ cat > fitting_sbp.conf << _EOF_
+radius_file radius_sbp.txt
+sbp_file flux_sbp.txt
+
+cfunc_file coolfunc_calc_data.txt
+T_file t_profile_dump.qdp
+
+n0 0.005
+rc 30
+beta 0.7
+bkg 0
+
+cm_per_pixel ${CM_PER_PIXEL}
+z ${Z}
+_EOF_
+
+ cat > fitting_dbeta_sbp.conf << _EOF_
+radius_file radius_sbp.txt
+sbp_file flux_sbp.txt
+
+cfunc_file coolfunc_calc_data.txt
+T_file t_profile_dump.qdp
+
+n01 0.05
+rc1 30
+beta1 0.7
+n02 0.005
+rc2 300
+beta2 0.7
+bkg 0
+
+cm_per_pixel ${CM_PER_PIXEL}
+z ${Z}
+_EOF_
+
+ # link files
+ [ -f ${IMG_DIR}/flux_sbp.txt ] && ln -svf ${IMG_DIR}/flux_sbp.txt .
+ [ -f ${IMG_DIR}/radius_sbp.txt ] && ln -svf ${IMG_DIR}/radius_sbp.txt .
+ [ -f ${IMG_DIR}/sbprofile.txt ] && ln -svf ${IMG_DIR}/sbprofile.txt .
+ [ -f ${SPC_DIR}/tcl_temp_profile.txt ] && ln -svf ${SPC_DIR}/tcl_temp_profile.txt .
+ exit 0
+fi
+
+########################################################################
+rm -rf _*
+rm -rf *?backup*
+rm -rf *backup?*
+rm *_bak *.log
+rm global.cfg flux_sbp.txt radius_sbp.txt sbprofile.txt
+rm tcl_temp_profile.qdp tcl_temp_profile.txt
+
+mkdir backup
+cp fitting_mass.conf fitting_sbp.conf backup/
+cp fitting_dbeta_mass.conf fitting_dbeta_sbp.conf backup/
+cp beta_param_center.txt dbeta_param_center.txt backup/
+cp wang2012_param.txt backup/
+cp results_mrl.txt final_result.txt backup/
+
+rm *.*
+
+[ -f backup/fitting_sbp.conf ] && cp backup/fitting_sbp.conf .
+[ -f backup/wang2012_param.txt ] && cp backup/wang2012_param.txt .
+[ -f fitting_mass.conf ] && cp fitting_mass.conf fitting_dbeta_mass.conf
+[ -f fitting_sbp.conf ] && cp fitting_sbp.conf fitting_dbeta_sbp.conf
+
+SBP_CONF=`ls backup/fitting_sbp.conf backup/fitting_dbeta_sbp.conf 2>/dev/null | head -n 1`
+EXPCORR_CONF=`ls ${IMG_DIR}/*_expcorr.conf`
+N_H=`grep '^nh' ${EXPCORR_CONF} | awk '{ print $2 }'`
+
+sed -i'' "s/^beta.*$/beta 0.5 0.1 0.7 T/" wang2012_param.txt
+
+if [ -f backup/fitting_mass.conf ]; then
+ # mass_conf
+ cp backup/fitting_mass.conf .
+ sed -i'' "s/^t_data_file.*$/t_data_file tcl_temp_profile.txt/" fitting_mass.conf
+ sed -i'' "s/^nh.*$/nh ${N_H}/" fitting_mass.conf
+ cp fitting_mass.conf fitting_dbeta_mass.conf
+ sed -i'' "s/fitting_sbp/fitting_dbeta_sbp/" fitting_dbeta_mass.conf
+ # sbp_conf
+ cp backup/fitting_sbp.conf .
+ SBP_CONF="fitting_sbp.conf"
+ SBP2_CONF="fitting_dbeta_sbp.conf"
+elif [ -f backup/fitting_dbeta_mass.conf ]; then
+ # mass_conf
+ cp backup/fitting_mass.conf .
+ cp backup/fitting_dbeta_mass.conf .
+ sed -i'' "s/^t_data_file.*$/t_data_file tcl_temp_profile.txt/" fitting_dbeta_mass.conf
+ sed -i'' "s/^nh.*$/nh ${N_H}/" fitting_dbeta_mass.conf
+ cp fitting_dbeta_mass.conf fitting_mass.conf
+ sed -i'' "s/fitting_dbeta_sbp/fitting_sbp/" fitting_mass.conf
+ # sbp_conf
+ cp backup/fitting_dbeta_sbp.conf .
+ SBP_CONF="fitting_dbeta_sbp.conf"
+ SBP2_CONF="fitting_sbp.conf"
+else
+ #
+    printf "*** ERROR: neither fitting_mass.conf nor fitting_dbeta_mass.conf exists in backup/ ***\n"
+fi
+
+radius_file=`grep '^radius_file' $SBP_CONF | awk '{ print $2 }'`
+sbp_file=`grep '^sbp_file' $SBP_CONF | awk '{ print $2 }'`
+cfunc_file=`grep '^cfunc_file' $SBP_CONF | awk '{ print $2 }'`
+T_file=`grep '^T_file' $SBP_CONF | awk '{ print $2 }'`
+cm_per_pixel=`grep '^cm_per_pixel' $SBP_CONF | awk '{ print $2 }'`
+z=`grep '^z' $SBP_CONF | awk '{ print $2 }'`
+
+if [ "x${SBP2_CONF}" = "xfitting_sbp.conf" ]; then
+ rm -f ${SBP2_CONF}
+ cat > ${SBP2_CONF} << _EOF_
+radius_file $radius_file
+sbp_file $sbp_file
+
+cfunc_file $cfunc_file
+T_file $T_file
+
+n0 0.005
+rc 30
+beta 0.7
+bkg 0
+
+cm_per_pixel $cm_per_pixel
+z $z
+_EOF_
+elif [ "x${SBP2_CONF}" = "xfitting_dbeta_sbp.conf" ]; then
+ rm -f ${SBP2_CONF}
+ cat > ${SBP2_CONF} << _EOF_
+radius_file $radius_file
+sbp_file $sbp_file
+
+cfunc_file $cfunc_file
+T_file $T_file
+
+n01 0.05
+rc1 30
+beta1 0.7
+n02 0.005
+rc2 300
+beta2 0.7
+bkg 0
+
+cm_per_pixel $cm_per_pixel
+z $z
+_EOF_
+else
+ #
+    printf "*** ERROR: unexpected SBP2_CONF value ***\n"
+fi
+
+[ -f ${IMG_DIR}/flux_sbp.txt ] && ln -sf ${IMG_DIR}/flux_sbp.txt .
+[ -f ${IMG_DIR}/radius_sbp.txt ] && ln -sf ${IMG_DIR}/radius_sbp.txt .
+[ -f ${IMG_DIR}/sbprofile.txt ] && ln -sf ${IMG_DIR}/sbprofile.txt .
+[ -f ${SPC_DIR}/tcl_temp_profile.txt ] && ln -sf ${SPC_DIR}/tcl_temp_profile.txt .
+
diff --git a/tools/cosmo_calc/Makefile b/tools/cosmo_calc/Makefile
new file mode 100644
index 0000000..abd12bb
--- /dev/null
+++ b/tools/cosmo_calc/Makefile
@@ -0,0 +1,31 @@
+##
+## Makefile for `cosmo_calc'
+##
+## Junhua Gu
+## last modified: August 12, 2012
+##
+
+CPP= g++
+CPPFLAGS= -Wall -g
+
+TARGET= cosmo_calc
+
+all: $(TARGET)
+
+cosmo_calc: calc_distance.o ddivid.o main.o
+ $(CPP) $(CPPFLAGS) $^ -o $@
+
+calc_distance.o: calc_distance.cc calc_distance.h
+ $(CPP) $(CPPFLAGS) -c $<
+
+ddivid.o: ddivid.cc ddivid.h
+ $(CPP) $(CPPFLAGS) -c $<
+
+main.o: main.cc ddivid.h calc_distance.h
+ $(CPP) $(CPPFLAGS) -c $<
+
+clean:
+ rm -f *.o
+ rm -f $(TARGET)
+
+
diff --git a/tools/cosmo_calc/README.txt b/tools/cosmo_calc/README.txt
new file mode 100644
index 0000000..b18e120
--- /dev/null
+++ b/tools/cosmo_calc/README.txt
@@ -0,0 +1,20 @@
+//
+// simple `cosmology calculator'
+// calculates commonly used cosmological distances:
+//   hubble distance
+//   comoving distance
+//   transverse comoving distance
+//   angular diameter distance
+//   luminosity distance
+//   light-travel distance
+// in addition, this calculator also computes some other
+// useful information related to Chandra
+//
+// Junhua Gu
+// LIweitiaNux <liweitianux@gmail.com>
+//
+// Version 2.0
+//
+// Last Modified:
+// August 12, 2012
+//
diff --git a/tools/cosmo_calc/VERSION.txt b/tools/cosmo_calc/VERSION.txt
new file mode 100644
index 0000000..f99d493
--- /dev/null
+++ b/tools/cosmo_calc/VERSION.txt
@@ -0,0 +1,10 @@
+Version 2.2
+2013/02/09
+
+ChangeLogs:
+v2.2:
+ 2013/02/09, LIweitiaNux
+ add 'hubble_parameter E(z)'
+ modify output format
+
+
diff --git a/tools/cosmo_calc/adapt_trapezoid.h b/tools/cosmo_calc/adapt_trapezoid.h
new file mode 100644
index 0000000..35d22b4
--- /dev/null
+++ b/tools/cosmo_calc/adapt_trapezoid.h
@@ -0,0 +1,124 @@
+#ifndef ADAPT_TRAPEZOID_H
+#define ADAPT_TRAPEZOID_H
+#include <list>
+#include <utility>
+#include <cassert>
+//#include <iostream>
+
+//using namespace std;
+
+template<typename T1,typename T2,typename T3>
+class triple
+{
+ public:
+ T1 first;
+ T2 second;
+ T3 third;
+ triple(T1 x1,T2 x2,T3 x3)
+ :first(x1),second(x2),third(x3)
+ {
+ }
+};
+
+
+template<typename T1,typename T2,typename T3>
+triple<T1,T2,T3> make_triple(T1 x1,T2 x2,T3 x3)
+{
+ return triple<T1,T2,T3>(x1,x2,x3);
+}
+
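+// trapezoid():
+// composite trapezoid rule on [x1,x2]; the number of sub-intervals is doubled
+// (starting from 512) until two successive estimates differ by less than
+// `err_limit', for at most `max_division' refinements.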
+template <typename T>
+T trapezoid(T (*fun)(T),T x1,T x2,T err_limit)
+{
+ int n=256;
+ T result;
+ const int max_division=24;
+ T old_value=0;
+ for(int i=1;i<max_division;i++)
+ {
+ result=0.;
+ n*=2;
+ T step=(x2-x1)/n;
+ for(int j=0;j<n;j++)
+ {
+ result+=(fun(x1+(j+1)*step)+fun(x1+j*step))*step/T(2.);
+ }
+ old_value-=result;
+ old_value=old_value<0?-old_value:old_value;
+ if(old_value<err_limit)
+ {
+ return result;
+ }
+      old_value=result;
+    }
+  // not converged within `max_division' refinements: return the last estimate
+  return result;
+}
+
+
+template <typename T>
+T adapt_trapezoid(T (*fun)(T),T x1,T x2,T err_limit)
+{
+ // const err_limit=.001;
+ typedef triple<T,T,bool> interval;
+  /* T: the interval endpoint and partial-sum type;
+     bool: state flag, true if the interval still needs to be refined,
+     false if it no longer needs to be updated
+   */
+ std::list<interval> interval_list;
+ T current_sum=((fun(x1)+fun(x2))/2.*(x2-x1));
+ interval_list.push_back(make_triple(x1,current_sum,true));
+ interval_list.push_back(make_triple(x2,(T)0.,true));
+ bool int_state=1;
+ int n_intervals=1;
+ while(int_state)
+ {
+ //std::cout<<n_intervals<<std::endl;
+ int_state=0;
+ typename std::list<interval>::iterator i1=interval_list.begin();
+ typename std::list<interval>::iterator i2=interval_list.begin();
+ i2++;
+ for(;i2!=interval_list.end();i1++,i2=i1,i2++)
+ {
+ //cout<<i1->first<<"\t"<<i2->first<<endl;
+ //assert(i2->first>i1->first);
+ if(i1->third)
+ {
+ interval new_interval((i1->first+i2->first)/2,0,true);
+
+ T sum1,sum2;
+ sum1=(fun(new_interval.first)+fun(i1->first))/2*(new_interval.first-i1->first);
+ sum2=(fun(new_interval.first)+fun(i2->first))/2*(i2->first-new_interval.first);
+ new_interval.second=sum2;
+ T err;
+ err=i1->second-sum1-sum2;
+ err=err<0?-err:err;
+
+ if(err>err_limit/n_intervals)
+ {
+ i1->second=sum1;
+ interval_list.insert(i2,new_interval);
+ n_intervals++;
+ if(n_intervals>10e6)
+ {
+
+ break;
+ }
+ }
+ else
+ {
+ i1->third=false;
+ }
+ int_state=1;
+ }
+ }
+
+ }
+ T result=0;
+ for(typename std::list<interval>::iterator i=interval_list.begin();i!=interval_list.end();i++)
+ {
+ result+=i->second;
+ }
+ return result;
+}
+
+#endif
+//end of the file
diff --git a/tools/cosmo_calc/calc_distance.cc b/tools/cosmo_calc/calc_distance.cc
new file mode 100644
index 0000000..4c96dc8
--- /dev/null
+++ b/tools/cosmo_calc/calc_distance.cc
@@ -0,0 +1,140 @@
+// calc_distance.cc
+// calculate commonly used cosmology distances
+// Ref: https://en.wikipedia.org/wiki/Distance_measures_(cosmology)
+//
+// Junhua Gu
+//
+// ChangeLogs:
+// 2012/08/12, LIweitiaNux
+// fix a bug in `calc_angular_distance()'
+// add `calc_transcomv_distance()' and
+// account `omega_k >0, <0' cases
+// add `calc_ligtrav_distance()'
+//
+
+#include <iostream>
+#include <cmath>
+#include <cstdlib>
+#include <cstddef>
+#include <cassert>
+// integrator
+#include "adapt_trapezoid.h"
+
+// extern variables in `calc_distance.h'
+extern double c;
+extern double km;
+extern double s;
+extern double Mpc;
+// cosmology parameters
+extern double H0; // units: [km/s/Mpc]
+extern double omega_m;
+extern double omega_l;
+extern double omega_k;
+// precision to determine float number equality
+extern double eps;
+
+using namespace std;
+
+// auxiliary functions
+// for `calc_comoving_distance'
+double f_comvdist(double z);
+// for `calc_ligtrav_distance'
+double f_ligdist(double z);
+double f_age(double z);
+
+/////////////////////////////////////////////////
+// main functions
+// //////////////////////////////////////////////
+// dimensionless Hubble parameter
+double E(double z)
+{
+ return sqrt(omega_m*(1+z)*(1+z)*(1+z) + omega_k*(1+z)*(1+z) + omega_l);
+}
+
+// Hubble distance
+double calc_hubble_distance(void)
+{
+ //
+ // cerr << "*** H0 = " << H0 << " ***" << endl;
+ //
+ double _H0_ = H0 * km/s/Mpc;
+ return c / _H0_;
+}
+
+// calc `comoving distance'
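+// d_C(z) = d_H * \int_0^z dz' / E(z')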
+double calc_comoving_distance(double z)
+{
+ double d_H = calc_hubble_distance();
+ return d_H * adapt_trapezoid(f_comvdist, 0.0, z, 1e-4);
+}
+
+// calc `transverse comoving distance'
+double calc_transcomv_distance(double z)
+{
+ if (fabs(omega_k) <= eps) {
+ // omega_k = 0
+ // flat, d_M(z) = d_C(z)
+ return calc_comoving_distance(z);
+ }
+ else if (omega_k > 0.0) {
+ // omega_k > 0
+ // d_M(z) = d_H/sqrt(omega_k) * sinh(sqrt(omega_k) * d_C(z) / d_H)
+ // ^ ^
+ double d_H = calc_hubble_distance();
+ double d_C = calc_comoving_distance(z);
+ return (d_H / sqrt(omega_k) * sinh(sqrt(omega_k) * d_C / d_H));
+ }
+ else {
+ // omega_k < 0
+ // d_M(z) = d_H/sqrt(-omega_k) * sin(sqrt(-omega_k) * d_C(z) / d_H)
+ // ^ ^ ^
+ double d_H = calc_hubble_distance();
+ double d_C = calc_comoving_distance(z);
+ return (d_H / sqrt(-omega_k) * sin(sqrt(-omega_k) * d_C / d_H));
+ }
+}
+
+// calc `angular diameter distance'
+// d_A(z) = d_M(z)/(1+z)
+double calc_angdia_distance(double z)
+{
+ return (calc_transcomv_distance(z) / (1+z));
+}
+
+// calc `luminosity distance'
+// d_L(z) = (1+z)*d_M(z)
+double calc_luminosity_distance(double z)
+{
+ return (calc_transcomv_distance(z) * (1+z));
+}
+
+// calc `light-travel distance'
+// d_T(z) = d_H \int_0^z 1/((1+z)*E(z)) dz
+double calc_ligtrav_distance(double z)
+{
+ double d_H = calc_hubble_distance();
+ return d_H * adapt_trapezoid(f_ligdist, 0.0, z, 1e-4);
+}
+
+
+// auxiliary functions
+// for `calc_comoving_distance'
+double f_comvdist(double z)
+{
+ return 1.0/E(z);
+}
+
+// for `calc_ligtrav_distance'
+double f_ligdist(double z)
+{
+ return 1.0 / ((1+z)*E(z));
+}
+
+double f_age(double z)
+{
+ return f_comvdist(1.0/z) / (z*z);
+}
+
+
+// EOF
+/* vim: set ts=8 sw=4 tw=0 fenc=utf-8 ft=cpp: */
diff --git a/tools/cosmo_calc/calc_distance.h b/tools/cosmo_calc/calc_distance.h
new file mode 100644
index 0000000..a1f3352
--- /dev/null
+++ b/tools/cosmo_calc/calc_distance.h
@@ -0,0 +1,59 @@
+// calc_distance.h
+// calculate commonly used cosmology distances
+// Ref: https://en.wikipedia.org/wiki/Distance_measures_(cosmology)
+//
+// Junhua Gu
+//
+// ChangeLogs:
+// 2012/08/12, LIweitiaNux
+// fix a bug in `calc_angular_distance()'
+// add `calc_transcomv_distance()' and
+// account `omega_k >0, <0' cases
+//
+
+#ifndef CALC_DISTANCE_H
+#define CALC_DISTANCE_H
+
+double cm=1.0;
+double s=1.0;
+double km=1000*100*cm;
+double Mpc=3.08568e+24*cm;
+double kpc=3.08568e+21*cm;
+double yr=365.*24.*3600.*s;
+double Gyr=1e9*yr;
+double arcsec2arc_ratio=1./60/60/180*3.1415926;
+double c=299792458.*100.*cm;
+
+// cosmology parameters
+double H0=71.0; // units: [km/s/Mpc]
+double omega_m=0.27;
+double omega_l=0.73;
+double omega_k=1.0-omega_m-omega_l;
+
+// precision to determine float number equality
+double eps=1.0e-7;
+
+// open functions
+//
+// dimensionless Hubble parameter
+extern double E(double z);
+// Hubble distance
+extern double calc_hubble_distance(void);
+// calc `comoving distance'
+extern double calc_comoving_distance(double z);
+// calc `transverse comoving distance'
+extern double calc_transcomv_distance(double z);
+// calc `angular diameter distance'
+// d_A(z) = d_M(z)/(1+z)
+extern double calc_angdia_distance(double z);
+// calc `luminosity distance'
+// d_L(z) = (1+z)*d_M(z)
+extern double calc_luminosity_distance(double z);
+// calc `light-travel distance'
+// d_T(z) = d_H \int_0^z 1/((1+z)*E(z)) dz
+extern double calc_ligtrav_distance(double z);
+
+#endif /* CALC_DISTANCE_H */
+
+//EOF
+/* vim: set ts=8 sw=4 tw=0 fenc=utf-8 ft=cpp: */
diff --git a/tools/cosmo_calc/calc_distance.o b/tools/cosmo_calc/calc_distance.o
new file mode 100644
index 0000000..d3c7cf5
--- /dev/null
+++ b/tools/cosmo_calc/calc_distance.o
Binary files differ
diff --git a/tools/cosmo_calc/cosmo_calc b/tools/cosmo_calc/cosmo_calc
new file mode 100755
index 0000000..58bc46b
--- /dev/null
+++ b/tools/cosmo_calc/cosmo_calc
Binary files differ
diff --git a/tools/cosmo_calc/ddivid.cc b/tools/cosmo_calc/ddivid.cc
new file mode 100644
index 0000000..aa72471
--- /dev/null
+++ b/tools/cosmo_calc/ddivid.cc
@@ -0,0 +1,56 @@
+#include <cassert>
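+
+// ddivid(): simple bisection root finder.
+// The first overload finds x in [x1,x2] with foo(x) = 0; the second finds x
+// with foo(x) = z.  Both assume the target is bracketed (sign change over
+// [x1,x2]) and stop once the bracket is narrower than `err'.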
+
+double ddivid(double (*foo)(double),double x1,double x2,double err)
+{
+ assert(x2>x1);
+ assert(foo(x2)*foo(x1)<0);
+ while(x2-x1>err)
+ {
+ double x=(x2+x1)/2.;
+ double y=foo(x);
+ double y1=foo(x1);
+ double y2=foo(x2);
+ if(y1*y<0)
+ {
+ x2=x;
+ }
+ else if(y2*y<0)
+ {
+ x1=x;
+ }
+ else
+ {
+ assert(0);
+ }
+
+ }
+ return (x1+x2)/2.;
+}
+
+
+double ddivid(double (*foo)(double),double z,double x1,double x2,double err)
+{
+ assert(x2>x1);
+ assert((foo(x2)-z)*(foo(x1)-z)<0);
+ while(x2-x1>err)
+ {
+ double x=(x2+x1)/2.;
+ double y=foo(x)-z;
+ double y1=foo(x1)-z;
+ double y2=foo(x2)-z;
+ if(y1*y<0)
+ {
+ x2=x;
+ }
+ else if(y2*y<0)
+ {
+ x1=x;
+ }
+ else
+ {
+ assert(0);
+ }
+
+ }
+ return (x1+x2)/2.;
+}
diff --git a/tools/cosmo_calc/ddivid.h b/tools/cosmo_calc/ddivid.h
new file mode 100644
index 0000000..a957de5
--- /dev/null
+++ b/tools/cosmo_calc/ddivid.h
@@ -0,0 +1,8 @@
+#ifndef DDIVID
+#define DDIVID
+
+extern double ddivid(double (*foo)(double),double x1,double x2,double err);
+extern double ddivid(double (*foo)(double),double z,double x1,double x2,double err);
+
+#endif
+//EOF
diff --git a/tools/cosmo_calc/ddivid.o b/tools/cosmo_calc/ddivid.o
new file mode 100644
index 0000000..5452fc3
--- /dev/null
+++ b/tools/cosmo_calc/ddivid.o
Binary files differ
diff --git a/tools/cosmo_calc/main.cc b/tools/cosmo_calc/main.cc
new file mode 100644
index 0000000..bbd73ba
--- /dev/null
+++ b/tools/cosmo_calc/main.cc
@@ -0,0 +1,140 @@
+//
+// simple `cosmology calculator'
+// calculates commonly used cosmological distances:
+//   hubble distance
+//   comoving distance
+//   transverse comoving distance
+//   angular diameter distance
+//   luminosity distance
+//   light-travel distance
+// in addition, this calculator also computes some other
+// useful information related to Chandra
+//
+// Junhua Gu
+//
+// Modified by: LIweitiaNux
+//
+// ChangeLogs:
+// v2.1, 2012/08/12, LIweitiaNux
+// improve cmdline parameters
+// v2.2, 2013/02/09, LIweitiaNux
+// add 'hubble_parameter E(z)'
+// modify output format
+//
+
+
+#include "calc_distance.h"
+#include "ddivid.h"
+#include <cstdio>
+#include <cstdlib>
+#include <cmath>
+#include <iostream>
+
+using namespace std;
+
+// extern variables in `calc_distance.h'
+extern double cm;
+extern double s;
+extern double km;
+extern double Mpc;
+extern double kpc;
+extern double yr;
+extern double Gyr;
+extern double H0; // units: [km/s/Mpc]
+extern double c;
+extern double omega_m;
+extern double omega_l;
+extern double omega_k;
+extern double arcsec2arc_ratio;
+
+// README, ABOUT
+static char DESC[] = "simple cosmology calculator";
+static char VER[] = "v2.2, 2013-02-09";
+
+// setting parameters
+static double PI = 4*atan(1.0);
+// chandra related
+static double arcsec_per_pixel = 0.492;
+// the cosmology parameters above can also be changed
+// through cmdline parameters
+
+// functions
+void usage(const char *name);
+
+//////////////////////////////////////////////
+// main part
+//////////////////////////////////////////////
+int main(int argc,char* argv[])
+{
+ double z; // given redshift
+
+ // cmdline parameters
+ if (argc == 2) {
+ // get input redshift
+ z = atof(argv[1]);
+ }
+ else if (argc == 3) {
+ z = atof(argv[1]);
+ // use specified `H0'
+ H0 = atof(argv[2]);
+ }
+ else if (argc == 4) {
+ z = atof(argv[1]);
+ H0 = atof(argv[2]);
+ // get specified `Omega_M'
+ omega_m = atof(argv[3]);
+ omega_l = 1.0-omega_m;
+ }
+ else {
+ usage(argv[0]);
+ exit(-1);
+ }
+
+ // calc `Hubble parameter E(z)'
+ double E_z = E(z);
+ // calc `comoving distance'
+ double d_C = calc_comoving_distance(z);
+ // calc `angular diameter distance'
+ double d_A = calc_angdia_distance(z);
+    // calc `luminosity distance'
+ double d_L = calc_luminosity_distance(z);
+
+ // output results
+ // parameters
+ printf("Parameters:\n");
+ printf(" z= %lf, H0= %lf, Omega_M= %lf, Omega_L= %lf\n",
+ z, H0, omega_m, omega_l);
+ printf("Distances:\n");
+ printf(" Comoving_distance: D_C(%lf)= %lg [cm], %lf [Mpc]\n",
+ z, d_C, d_C/Mpc);
+ printf(" Angular_diameter_distance: D_A(%lf)= %lg [cm], %lf [Mpc]\n",
+ z, d_A, d_A/Mpc);
+    printf("    Luminosity_distance: D_L(%lf)= %lg [cm], %lf [Mpc]\n",
+ z, d_L, d_L/Mpc);
+ printf("Chandra_related:\n");
+ printf(" kpc/pixel (D_A): %lf\n",
+ (d_A / kpc * arcsec_per_pixel* arcsec2arc_ratio));
+ printf(" cm/pixel (D_A): %lg\n",
+ (d_A * arcsec_per_pixel* arcsec2arc_ratio));
+ printf("Other_data:\n");
+ printf(" Hubble_parameter: E(%lf)= %lf\n", z, E_z);
+ printf(" kpc/arcsec (D_A): %lf\n", (d_A / kpc * arcsec2arc_ratio));
+ printf(" norm (cooling_function): %lg\n",
+ (1e-14 / (4.0 * PI * pow(d_A*(1+z), 2))));
+
+ //cout<<ddivid(calc_distance,d,0,1,.0001)<<endl;
+
+ return 0;
+}
+
+// other auxiliary functions
+void usage(const char *name)
+{
+ cerr << "Usage: " << endl;
+ cerr << " " << name << " z [H0] [Omega_M]" << endl;
+ // description
+ cout << endl << "About:" << endl;
+ cout << DESC << endl << VER << endl;
+}
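+
+// example invocation (hypothetical values):
+//   ./cosmo_calc 0.05 71 0.27
+// i.e. z=0.05, H0=71 [km/s/Mpc], Omega_M=0.27 (Omega_L = 1 - Omega_M)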
+
+/* vim: set ts=8 sw=4 tw=0 fenc=utf-8 ft=cpp: */
diff --git a/tools/cosmo_calc/main.o b/tools/cosmo_calc/main.o
new file mode 100644
index 0000000..66b1c4f
--- /dev/null
+++ b/tools/cosmo_calc/main.o
Binary files differ