aboutsummaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
authorAaron LI <aaronly.me@outlook.com>2015-06-03 14:02:51 +0800
committerAaron LI <aaronly.me@outlook.com>2015-06-03 14:02:51 +0800
commitb5f43f1b99c8b181af2c657a5b95616f09ad8232 (patch)
tree342cee06dafea1ae7692b049d217c53ebc5a9a6b
parent57ff0ed9c57a2c4bca2e7e1a592b6636e0457887 (diff)
downloadchandra-acis-analysis-b5f43f1b99c8b181af2c657a5b95616f09ad8232.tar.bz2
Update various script to use localized pfiles to avoid collisons.
* Copy needed pfiles to current working directory, and set environment variable $PFILES to use these first. Thus multiple instance of CIAO tools can be run at the same without pfiles collsions. * Replace 'ls' with '\ls', 'grep' with '\grep' * Many minor updates/fixes
-rwxr-xr-xscripts/analyze_path.sh2
-rwxr-xr-xscripts/calc_lx_simple.sh61
-rwxr-xr-xscripts/chandra_bkg_rescale.sh32
-rwxr-xr-xscripts/chandra_ccdgap_rect.py6
-rwxr-xr-xscripts/chandra_collect_data_v3.sh296
-rwxr-xr-xscripts/chandra_gensbpreg.sh56
-rwxr-xr-xscripts/chandra_genspcreg.sh70
-rwxr-xr-xscripts/chandra_pb_flux.sh38
-rwxr-xr-xscripts/chandra_update_xcentroid.sh64
-rwxr-xr-xscripts/chandra_xcentroid.sh66
-rwxr-xr-xscripts/chandra_xpeak_coord.sh52
-rwxr-xr-xscripts/ciao_bkg_spectra.sh156
-rwxr-xr-xscripts/ciao_blanksky.sh39
-rwxr-xr-xscripts/ciao_calc_csb.sh81
-rwxr-xr-xscripts/ciao_calc_ct.sh151
-rwxr-xr-xscripts/ciao_calc_ct_csb.sh24
-rwxr-xr-xscripts/ciao_check_offset.sh49
-rwxr-xr-xscripts/ciao_deproj_spectra.sh31
-rwxr-xr-xscripts/ciao_expcorr.sh102
-rwxr-xr-xscripts/ciao_expcorr_sbp.sh28
-rwxr-xr-xscripts/ciao_genregs.sh78
-rwxr-xr-xscripts/ciao_procevt.sh50
-rwxr-xr-xscripts/ciao_r500avgt.sh125
-rwxr-xr-xscripts/ciao_sbp.sh60
-rwxr-xr-xscripts/make_infojson.sh32
25 files changed, 1122 insertions, 627 deletions
diff --git a/scripts/analyze_path.sh b/scripts/analyze_path.sh
index 684fc9c..9a1a7b9 100755
--- a/scripts/analyze_path.sh
+++ b/scripts/analyze_path.sh
@@ -3,7 +3,7 @@
analyze_path() {
# extract `obs_id' and `source name' from path
#
- # LIweitiaNux <liweitianux@gmail.com>
+ # Weitian LI <liweitianux@gmail.com>
# 2013/02/04
#
# input:
diff --git a/scripts/calc_lx_simple.sh b/scripts/calc_lx_simple.sh
index a13c67b..a5ae978 100755
--- a/scripts/calc_lx_simple.sh
+++ b/scripts/calc_lx_simple.sh
@@ -1,20 +1,23 @@
#!/bin/sh
-#
+##
+## Run calc_lx in BATCH mode
+##
+## Weitian LI <liweitianux@gmail.com>
+## Created: 2012/08/31
+##
+UPDATED="2014/06/18"
+##
+## Changelogs:
+## 2015/06/03:
+## * Replaced 'grep' with '\grep', 'ls' with '\ls'
+## 2014/06/18:
+## * use env variable 'MASS_PROFILE_DIR'
+##
+
unalias -a
export LC_COLLATE=C
# fix path for python
-export PATH="/usr/bin:$PATH"
-###########################################################
-## calc_lx in BATCH mode ##
-## ##
-## LIweitiaNux <liweitianux@gmail.com> ##
-## August 31, 2012 ##
-## ##
-## ChangeLog: ##
-## 2014/06/18: use env variable 'MASS_PROFILE_DIR' ##
-###########################################################
-
-UPDATED="2014/06/18"
+export PATH="/usr/bin:/usr/local/bin:$PATH"
## usage, `path_conffile' is the configuration file
## which contains the `path' to each `repro/mass' directory
@@ -64,25 +67,25 @@ else
[ -e "${LOGFILE}" ] && mv -fv ${LOGFILE} ${LOGFILE}_bak
TOLOG="tee -a ${LOGFILE}"
# fitting_mass logfile, get R500 from it
- MLOG=`ls ${MCONF%.[confgCONFG]*}*.log | tail -n 1`
- R500_VAL=`tail -n ${RES_LINE} ${MLOG} | grep '^r500' | awk '{ print $2 }'`
- R200_VAL=`tail -n ${RES_LINE} ${MLOG} | grep '^r200' | awk '{ print $2 }'`
+ MLOG=`\ls ${MCONF%.[confgCONFG]*}*.log | tail -n 1`
+ R500_VAL=`tail -n ${RES_LINE} ${MLOG} | \grep '^r500' | awk '{ print $2 }'`
+ R200_VAL=`tail -n ${RES_LINE} ${MLOG} | \grep '^r200' | awk '{ print $2 }'`
# radius_sbp_file {{{
- RSBP=`grep '^radius_sbp_file' ${MCONF} | awk '{ print $2 }'`
+ RSBP=`\grep '^radius_sbp_file' ${MCONF} | awk '{ print $2 }'`
TMP_RSBP="_tmp_rsbp.txt"
[ -e "${TMP_RSBP}" ] && rm -f ${TMP_RSBP}
- cat ${RSBP} | sed 's/#.*$//' | grep -Ev '^\s*$' > ${TMP_RSBP}
+ cat ${RSBP} | sed 's/#.*$//' | \grep -Ev '^\s*$' > ${TMP_RSBP}
RSBP="${TMP_RSBP}"
# rsbp }}}
- TPRO_TYPE=`grep '^t_profile' ${MCONF} | awk '{ print $2 }'`
- TPRO_DATA=`grep '^t_data_file' ${MCONF} | awk '{ print $2 }'`
- TPRO_PARA=`grep '^t_param_file' ${MCONF} | awk '{ print $2 }'`
- SBP_CONF=`grep '^sbp_cfg' ${MCONF} | awk '{ print $2 }'`
- ABUND=`grep '^abund' ${MCONF} | awk '{ print $2 }'`
- NH=`grep '^nh' ${MCONF} | awk '{ print $2 }'`
- Z=`grep '^z' ${SBP_CONF} | awk '{ print $2 }'`
- cm_per_pixel=`grep '^cm_per_pixel' ${SBP_CONF} | awk '{ print $2 }'`
- CF_FILE=`grep '^cfunc_file' ${SBP_CONF} | awk '{ print $2 }'`
+ TPRO_TYPE=`\grep '^t_profile' ${MCONF} | awk '{ print $2 }'`
+ TPRO_DATA=`\grep '^t_data_file' ${MCONF} | awk '{ print $2 }'`
+ TPRO_PARA=`\grep '^t_param_file' ${MCONF} | awk '{ print $2 }'`
+ SBP_CONF=`\grep '^sbp_cfg' ${MCONF} | awk '{ print $2 }'`
+ ABUND=`\grep '^abund' ${MCONF} | awk '{ print $2 }'`
+ NH=`\grep '^nh' ${MCONF} | awk '{ print $2 }'`
+ Z=`\grep '^z' ${SBP_CONF} | awk '{ print $2 }'`
+ cm_per_pixel=`\grep '^cm_per_pixel' ${SBP_CONF} | awk '{ print $2 }'`
+ CF_FILE=`\grep '^cfunc_file' ${SBP_CONF} | awk '{ print $2 }'`
printf "## use logfile: \`${LOGFILE}'\n"
printf "## working directory: \``pwd -P`'\n" | ${TOLOG}
printf "## use configuration files: \`${MCONF}, ${SBP_CONF}'\n" | ${TOLOG}
@@ -121,8 +124,8 @@ else
## flux_ratio }}}
printf "## CMD: ${CALCLX_SCRIPT} ${RSBP} ${FLUX_RATIO} ${Z} ${R500_VAL} ${TPRO_DATA}\n" | ${TOLOG}
printf "## CMD: ${CALCLX_SCRIPT} ${RSBP} ${FLUX_RATIO} ${Z} ${R200_VAL} ${TPRO_DATA}\n" | ${TOLOG}
- L500=`${CALCLX_SCRIPT} ${RSBP} ${FLUX_RATIO} ${Z} ${R500_VAL} ${TPRO_DATA} | grep '^Lx' | awk '{ print $2,$3,$4 }'`
- L200=`${CALCLX_SCRIPT} ${RSBP} ${FLUX_RATIO} ${Z} ${R200_VAL} ${TPRO_DATA} | grep '^Lx' | awk '{ print $2,$3,$4 }'`
+ L500=`${CALCLX_SCRIPT} ${RSBP} ${FLUX_RATIO} ${Z} ${R500_VAL} ${TPRO_DATA} | \grep '^Lx' | awk '{ print $2,$3,$4 }'`
+ L200=`${CALCLX_SCRIPT} ${RSBP} ${FLUX_RATIO} ${Z} ${R200_VAL} ${TPRO_DATA} | \grep '^Lx' | awk '{ print $2,$3,$4 }'`
printf "L500= ${L500} erg/s\n" | ${TOLOG}
printf "L200= ${L200} erg/s\n" | ${TOLOG}
fi
diff --git a/scripts/chandra_bkg_rescale.sh b/scripts/chandra_bkg_rescale.sh
index ae13af1..8d4abc7 100755
--- a/scripts/chandra_bkg_rescale.sh
+++ b/scripts/chandra_bkg_rescale.sh
@@ -3,8 +3,15 @@
# background rescale, by adjusting `BACKSCAL'
# according to the photon flux values in `9.5-12.0 keV'
#
-# LIweitiaNux <liweitianux@gmail.com>
-# August 14, 2012
+# Weitian LI <liweitianux@gmail.com>
+# 2012/08/14
+#
+# Changelogs:
+# 2015/06/03, Aaron LI
+# * Copy needed pfiles to tmp directory,
+# set environment variable $PFILES to use these first.
+# and remove them after usage.
+#
## background rescale (BACKSCAL) {{{
# rescale background according to particle background
@@ -39,7 +46,7 @@ bkg_rescale() {
}
## bkg rescale }}}
-if [ $# -ne 2 ]; then
+if [ $# -ne 2 ] || [ "x$1" = "x-h" ]; then
printf "usage:\n"
printf " `basename $0` <src_spec> <bkg_spec>\n"
printf "\nNOTE:\n"
@@ -47,8 +54,27 @@ if [ $# -ne 2 ]; then
exit 1
fi
+## prepare parameter files (pfiles) {{{
+CIAO_TOOLS="dmstat dmkeypar dmhedit"
+
+PFILES_TMPDIR="/tmp/pfiles-$$"
+[ -d "${PFILES_TMPDIR}" ] && rm -rf ${PFILES_TMPDIR} || mkdir ${PFILES_TMPDIR}
+
+# Copy necessary pfiles for localized usage
+for tool in ${CIAO_TOOLS}; do
+ pfile=`paccess ${tool}`
+ [ -n "${pfile}" ] && punlearn ${tool} && cp -Lvf ${pfile} ${PFILES_TMPDIR}/
+done
+
+# Modify environment variable 'PFILES' to use local pfiles first
+export PFILES="${PFILES_TMPDIR}:${PFILES}"
+## pfiles }}}
+
# perform `bkg_rescale'
bkg_rescale "$1" "$2"
+# clean pfiles
+rm -rf ${PFILES_TMPDIR}
+
exit 0
diff --git a/scripts/chandra_ccdgap_rect.py b/scripts/chandra_ccdgap_rect.py
index 60bcdac..d95129a 100755
--- a/scripts/chandra_ccdgap_rect.py
+++ b/scripts/chandra_ccdgap_rect.py
@@ -1,6 +1,5 @@
#!/usr/bin/python
-
import sys
import math
import re
@@ -71,10 +70,11 @@ def poly2rect(plist):
if __name__=='__main__':
if len(sys.argv)!=2:
- print("Usage:%s %s"%(sys.argv[0]," <input regfile, including only polygens>"))
+ print("Usage:")
+ print(" %s <input regfile (only polygens)>" % sys.argv[0])
sys.exit()
for i in open(sys.argv[1]):
if re.match('.*olygon',i):
reg=poly2rect(parse_poly(i))
print(reg)
-
+
diff --git a/scripts/chandra_collect_data_v3.sh b/scripts/chandra_collect_data_v3.sh
index a1b56d8..d78d1da 100755
--- a/scripts/chandra_collect_data_v3.sh
+++ b/scripts/chandra_collect_data_v3.sh
@@ -3,38 +3,39 @@
unalias -a
export LC_COLLATE=C
# fix path for python
-export PATH=/usr/bin:$PATH
+export PATH=/usr/bin:/usr/local/bin:$PATH
#########################################################################
-## collect needed data and save to a JSON file
+## Collect the calculated mass data and write to the JSON file.
##
-## LIweitiaNux <liweitianux@gmail.com>
+## Weitian LI <liweitianux@gmail.com>
## August 31, 2012
##
## ChangeLogs:
## check JSON syntax, modify output to agree with the syntax
## Ref: http://json.parser.online.fr/
-## v1.1, 2012/09/05, LIweitiaNux
+## v1.1, 2012/09/05, Weitian LI
## add `T_avg(0.2-0.5 R500)' and `T_err'
-## v2.0, 2012/10/14, LIweitiaNux
+## v2.0, 2012/10/14, Weitian LI
## add parameters
-## v2.1, 2012/11/07, LIweitiaNux
+## v2.1, 2012/11/07, Weitian LI
## account for `fitting_dbeta'
-## v2.2, 2012/12/18, LIweitiaNux
+## v2.2, 2012/12/18, Weitian LI
## add `beta' and `cooling time' parameters
-## v3.0, 2013/02/09, LIweitiaNux
+## v3.0, 2013/02/09, Weitian LI
## modified for `new sample info format'
-## v3.1, 2013/05/18, LIweitiaNux
+## v3.1, 2013/05/18, Weitian LI
## add key `Feature'
-## v3.2, 2013/05/29, LIweitiaNux
+## v3.2, 2013/05/29, Weitian LI
## add key `XCNTRD_RA, XCNTRD_DEC'
-## v3.3, 2013/10/14, LIweitiaNux
+## v3.3, 2013/10/14, Weitian LI
## add key `Unified Name'
-#########################################################################
-
-## about, used in `usage' {{{
-VERSION="v3.0"
-UPDATE="2013-02-09"
-## about }}}
+## v3.4, 2015/06/03, Aaron LI
+## * Copy needed pfiles to current working directory, and
+## set environment variable $PFILES to use these first.
+## * Replaced 'grep' with '\grep', 'ls' with '\ls'
+##
+VERSION="v3.4"
+UPDATED="2015/06/03"
## error code {{{
ERR_USG=1
@@ -56,7 +57,7 @@ case "$1" in
printf "usage:\n"
printf " `basename $0` json=<info_json> cfg=<main_cfg> res=<final_result> basedir=<repro_dir> massdir=<mass_dir>\n"
printf "\nversion:\n"
- printf "${VERSION}, ${UPDATE}\n"
+ printf "${VERSION}, ${UPDATED}\n"
exit ${ERR_USG}
;;
esac
@@ -110,16 +111,16 @@ INIT_DIR=`pwd -P`
# check given parameters
# check given dir
-if [ -d "${basedir}" ] && ls ${basedir}/*repro_evt2.fits > /dev/null 2>&1; then
+if [ -d "${basedir}" ] && \ls ${basedir}/*repro_evt2.fits > /dev/null 2>&1; then
BASEDIR=${basedir}
-elif [ -d "${DFT_BASEDIR}" ] && ls ${DFT_BASEDIR}/*repro_evt2.fits > /dev/null 2>&1; then
+elif [ -d "${DFT_BASEDIR}" ] && \ls ${DFT_BASEDIR}/*repro_evt2.fits > /dev/null 2>&1; then
BASEDIR=${DFT_BASEDIR}
else
read -p "> basedir (contains info json): " BASEDIR
if [ ! -d "${BASEDIR}" ]; then
printf "ERROR: given \`${BASEDIR}' NOT a directory\n"
exit ${ERR_DIR}
- elif ! ls ${BASEDIR}/*repro_evt2.fits > /dev/null 2>&1; then
+ elif ! \ls ${BASEDIR}/*repro_evt2.fits > /dev/null 2>&1; then
printf "ERROR: given \`${BASEDIR}' NOT contains needed evt files\n"
exit ${ERR_DIR}
fi
@@ -145,8 +146,8 @@ printf "## use massdir: \`${MASS_DIR}'\n"
# check json file
if [ ! -z "${json}" ] && [ -r "${BASEDIR}/${json}" ]; then
JSON_FILE="${json}"
-elif [ "`ls ${BASEDIR}/${DFT_JSON_PAT} | wc -l`" -eq 1 ]; then
- JSON_FILE=`( cd ${BASEDIR} && ls ${DFT_JSON_PAT} )`
+elif [ "`\ls ${BASEDIR}/${DFT_JSON_PAT} | wc -l`" -eq 1 ]; then
+ JSON_FILE=`( cd ${BASEDIR} && \ls ${DFT_JSON_PAT} )`
else
read -p "> info json file: " JSON_FILE
if ! [ -r "${BASEDIR}/${JSON_FILE}" ]; then
@@ -168,7 +169,7 @@ else
fi
fi
printf "## use main config file: \`${CFG_FILE}'\n"
-SBP_CFG=`grep '^sbp_cfg' ${CFG_FILE} | awk '{ print $2 }'`
+SBP_CFG=`\grep '^sbp_cfg' ${CFG_FILE} | awk '{ print $2 }'`
printf "## sbp config file: \`${SBP_CFG}'\n"
# check final result file
if [ ! -z "${res}" ] && [ -r "${res}" ]; then
@@ -198,20 +199,33 @@ IMG_DIR="${BASEDIR}/img"
SPEC_DIR="${BASEDIR}/spc/profile"
## dir & file }}}
+## prepare parameter files (pfiles) {{{
+CIAO_TOOLS="dmkeypar"
+
+# Copy necessary pfiles for localized usage
+for tool in ${CIAO_TOOLS}; do
+ pfile=`paccess ${tool}`
+ [ -n "${pfile}" ] && punlearn ${tool} && cp -Lvf ${pfile} .
+done
+
+# Modify environment variable 'PFILES' to use local pfiles first
+export PFILES="./:${PFILES}"
+## pfiles }}}
+
cd ${BASEDIR}
printf "## enter directory: `pwd -P`\n"
## in dir `repro' {{{
punlearn dmkeypar
-EVT_RAW=`ls *repro_evt2.fits`
+EVT_RAW=`\ls *repro_evt2.fits`
OBS_ID=`dmkeypar ${EVT_RAW} OBS_ID echo=yes`
DATE_OBS=`dmkeypar ${EVT_RAW} DATE-OBS echo=yes`
EXPOSURE_RAW=`dmkeypar ${EVT_RAW} EXPOSURE echo=yes | awk '{ print $1/1000 }'`
## ACIS_TYPE
DETNAM=`dmkeypar ${EVT_RAW} DETNAM echo=yes`
-if echo ${DETNAM} | grep -q 'ACIS-0123'; then
+if echo ${DETNAM} | \grep -q 'ACIS-0123'; then
ACIS_TYPE="ACIS-I"
-elif echo ${DETNAM} | grep -q 'ACIS-[0-6]*7'; then
+elif echo ${DETNAM} | \grep -q 'ACIS-[0-6]*7'; then
ACIS_TYPE="ACIS-S"
else
printf "*** ERROR: unknown detector type: ${DETNAM}\n"
@@ -221,7 +235,7 @@ fi
## in dir `repro/evt' {{{
cd ${EVT_DIR}
-EVT_CLEAN=`ls evt2_c*_clean.fits`
+EVT_CLEAN=`\ls evt2_c*_clean.fits`
EXPOSURE_CLEAN=`dmkeypar ${EVT_CLEAN} EXPOSURE echo=yes | awk '{ print $1/1000 }'`
## dir `repro/evt' }}}
@@ -229,26 +243,26 @@ EXPOSURE_CLEAN=`dmkeypar ${EVT_CLEAN} EXPOSURE echo=yes | awk '{ print $1/1000 }
cd ${MASS_DIR}
# misc {{{
-N_H=`grep '^nh' ${CFG_FILE} | awk '{ print $2 }'`
-Z=`grep '^z' ${SBP_CFG} | awk '{ print $2 }'`
-T_DATA_FILE=`grep '^t_data_file' ${CFG_FILE} | awk '{ print $2 }'`
-NFW_RMIN_KPC=`grep '^nfw_rmin_kpc' ${CFG_FILE} | awk '{ print $2 }'`
-E_Z=`${COSMO_CALC} ${Z} | grep -i 'Hubble_parameter' | awk '{ print $3 }'`
-KPC_PER_PIXEL=`${COSMO_CALC} ${Z} | grep 'kpc/pixel' | awk '{ print $3 }'`
-RADIUS_SBP_FILE=`grep '^radius_sbp_file' ${CFG_FILE} | awk '{ print $2 }'`
+N_H=`\grep '^nh' ${CFG_FILE} | awk '{ print $2 }'`
+Z=`\grep '^z' ${SBP_CFG} | awk '{ print $2 }'`
+T_DATA_FILE=`\grep '^t_data_file' ${CFG_FILE} | awk '{ print $2 }'`
+NFW_RMIN_KPC=`\grep '^nfw_rmin_kpc' ${CFG_FILE} | awk '{ print $2 }'`
+E_Z=`${COSMO_CALC} ${Z} | \grep -i 'Hubble_parameter' | awk '{ print $3 }'`
+KPC_PER_PIXEL=`${COSMO_CALC} ${Z} | \grep 'kpc/pixel' | awk '{ print $3 }'`
+RADIUS_SBP_FILE=`\grep '^radius_sbp_file' ${CFG_FILE} | awk '{ print $2 }'`
RMAX_SBP_PIX=`tail -n 1 ${RADIUS_SBP_FILE} | awk '{ print $1+$2 }'`
RMAX_SBP_KPC=`echo "${RMAX_SBP_PIX} ${KPC_PER_PIXEL}" | awk '{ printf("%.2f", $1*$2) }'`
SPC_DIR=`eval ${DFT_SPC_DIR_CMD}`
if [ -f "${SPC_DIR}/${DFT_RSPEC_REG}" ]; then
- RMAX_TPRO_PIX=`grep -iE '(pie|annulus)' ${SPC_DIR}/${DFT_RSPEC_REG} | tail -n 1 | awk -F',' '{ print $4 }'`
+ RMAX_TPRO_PIX=`\grep -iE '(pie|annulus)' ${SPC_DIR}/${DFT_RSPEC_REG} | tail -n 1 | awk -F',' '{ print $4 }'`
RMAX_TPRO_KPC=`echo "${RMAX_TPRO_PIX} ${KPC_PER_PIXEL}" | awk '{ printf("%.2f", $1*$2) }'`
fi
IMG_DIR=`eval ${DFT_IMG_DIR_CMD}`
-EXPCORR_CONF=`ls ${IMG_DIR}/${DFT_EXPCORR_CONF_PAT} 2> /dev/null`
+EXPCORR_CONF=`\ls ${IMG_DIR}/${DFT_EXPCORR_CONF_PAT} 2> /dev/null`
echo "EXPCORR_CONF: ${EXPCORR_CONF}"
if [ -f "${EXPCORR_CONF}" ]; then
- T_REF=`grep '^temp' ${EXPCORR_CONF} | awk '{ print $2 }'`
- Z_REF=`grep '^abund' ${EXPCORR_CONF} | awk '{ print $2 }'`
+ T_REF=`\grep '^temp' ${EXPCORR_CONF} | awk '{ print $2 }'`
+ Z_REF=`\grep '^abund' ${EXPCORR_CONF} | awk '{ print $2 }'`
fi
[ -z "${NFW_RMIN_KPC}" ] && NFW_RMIN_KPC="null"
[ -z "${RMAX_SBP_PIX}" ] && RMAX_SBP_PIX="null"
@@ -258,17 +272,17 @@ fi
# misc }}}
## determine single/double beta {{{
-if grep -q '^beta2' ${SBP_CFG}; then
+if \grep -q '^beta2' ${SBP_CFG}; then
MODEL_SBP="double-beta"
- n01=`grep '^n01' ${RES_FINAL} | awk '{ print $2 }'`
- beta1=`grep '^beta1' ${RES_FINAL} | awk '{ print $2 }'`
- rc1=`grep -E '^rc1\s' ${RES_FINAL} | awk '{ print $2 }'`
- rc1_kpc=`grep '^rc1_kpc' ${RES_FINAL} | awk '{ print $2 }'`
- n02=`grep '^n02' ${RES_FINAL} | awk '{ print $2 }'`
- beta2=`grep '^beta2' ${RES_FINAL} | awk '{ print $2 }'`
- rc2=`grep -E '^rc2\s' ${RES_FINAL} | awk '{ print $2 }'`
- rc2_kpc=`grep '^rc2_kpc' ${RES_FINAL} | awk '{ print $2 }'`
- BKG=`grep '^bkg' ${RES_FINAL} | awk '{ print $2 }'`
+ n01=`\grep '^n01' ${RES_FINAL} | awk '{ print $2 }'`
+ beta1=`\grep '^beta1' ${RES_FINAL} | awk '{ print $2 }'`
+ rc1=`\grep -E '^rc1\s' ${RES_FINAL} | awk '{ print $2 }'`
+ rc1_kpc=`\grep '^rc1_kpc' ${RES_FINAL} | awk '{ print $2 }'`
+ n02=`\grep '^n02' ${RES_FINAL} | awk '{ print $2 }'`
+ beta2=`\grep '^beta2' ${RES_FINAL} | awk '{ print $2 }'`
+ rc2=`\grep -E '^rc2\s' ${RES_FINAL} | awk '{ print $2 }'`
+ rc2_kpc=`\grep '^rc2_kpc' ${RES_FINAL} | awk '{ print $2 }'`
+ BKG=`\grep '^bkg' ${RES_FINAL} | awk '{ print $2 }'`
# beta1 -> smaller rc; beta2 -> bigger rc
if [ `echo "${rc1} < ${rc2}" | bc -l` -eq 1 ]; then
N01=${n01}
@@ -296,30 +310,30 @@ else
BETA1="null"
RC1="null"
RC1_KPC="null"
- N02=`grep '^n0' ${RES_FINAL} | awk '{ print $2 }'`
- BETA2=`grep '^beta' ${RES_FINAL} | awk '{ print $2 }'`
- RC2=`grep -E '^rc\s' ${RES_FINAL} | awk '{ print $2 }'`
- RC2_KPC=`grep '^rc_kpc' ${RES_FINAL} | awk '{ print $2 }'`
- BKG=`grep '^bkg' ${RES_FINAL} | awk '{ print $2 }'`
+ N02=`\grep '^n0' ${RES_FINAL} | awk '{ print $2 }'`
+ BETA2=`\grep '^beta' ${RES_FINAL} | awk '{ print $2 }'`
+ RC2=`\grep -E '^rc\s' ${RES_FINAL} | awk '{ print $2 }'`
+ RC2_KPC=`\grep '^rc_kpc' ${RES_FINAL} | awk '{ print $2 }'`
+ BKG=`\grep '^bkg' ${RES_FINAL} | awk '{ print $2 }'`
fi
## single/double beta }}}
## get `mass/virial_radius/luminosity' {{{
# 200 data {{{
-R200_VAL=`grep '^r200' ${RES_FINAL} | awk '{ print $2 }'`
-R200_ERR_L=`grep '^r200' ${RES_FINAL} | awk '{ print $3 }' | awk -F'/' '{ print $1 }'`
-R200_ERR_U=`grep '^r200' ${RES_FINAL} | awk '{ print $3 }' | awk -F'/' '{ print $2 }' | tr -d '+'`
-M200_VAL=`grep '^m200' ${RES_FINAL} | awk '{ print $2 }'`
-M200_ERR_L=`grep '^m200' ${RES_FINAL} | awk '{ print $3 }' | awk -F'/' '{ print $1 }'`
-M200_ERR_U=`grep '^m200' ${RES_FINAL} | awk '{ print $3 }' | awk -F'/' '{ print $2 }' | tr -d '+'`
-L200_VAL=`grep '^L200' ${RES_FINAL} | awk '{ print $2 }'`
-L200_ERR=`grep '^L200' ${RES_FINAL} | awk '{ print $4 }'`
-MGAS200_VAL=`grep '^gas_m200' ${RES_FINAL} | awk '{ print $2 }'`
-MGAS200_ERR_L=`grep '^gas_m200' ${RES_FINAL} | awk '{ print $3 }' | awk -F'/' '{ print $1 }'`
-MGAS200_ERR_U=`grep '^gas_m200' ${RES_FINAL} | awk '{ print $3 }' | awk -F'/' '{ print $2 }' | tr -d '+'`
-FGAS200_VAL=`grep '^gas_fraction200' ${RES_FINAL} | awk '{ print $2 }'`
-FGAS200_ERR_L=`grep '^gas_fraction200' ${RES_FINAL} | awk '{ print $3 }' | awk -F'/' '{ print $1 }'`
-FGAS200_ERR_U=`grep '^gas_fraction200' ${RES_FINAL} | awk '{ print $3 }' | awk -F'/' '{ print $2 }' | tr -d '+'`
+R200_VAL=`\grep '^r200' ${RES_FINAL} | awk '{ print $2 }'`
+R200_ERR_L=`\grep '^r200' ${RES_FINAL} | awk '{ print $3 }' | awk -F'/' '{ print $1 }'`
+R200_ERR_U=`\grep '^r200' ${RES_FINAL} | awk '{ print $3 }' | awk -F'/' '{ print $2 }' | tr -d '+'`
+M200_VAL=`\grep '^m200' ${RES_FINAL} | awk '{ print $2 }'`
+M200_ERR_L=`\grep '^m200' ${RES_FINAL} | awk '{ print $3 }' | awk -F'/' '{ print $1 }'`
+M200_ERR_U=`\grep '^m200' ${RES_FINAL} | awk '{ print $3 }' | awk -F'/' '{ print $2 }' | tr -d '+'`
+L200_VAL=`\grep '^L200' ${RES_FINAL} | awk '{ print $2 }'`
+L200_ERR=`\grep '^L200' ${RES_FINAL} | awk '{ print $4 }'`
+MGAS200_VAL=`\grep '^gas_m200' ${RES_FINAL} | awk '{ print $2 }'`
+MGAS200_ERR_L=`\grep '^gas_m200' ${RES_FINAL} | awk '{ print $3 }' | awk -F'/' '{ print $1 }'`
+MGAS200_ERR_U=`\grep '^gas_m200' ${RES_FINAL} | awk '{ print $3 }' | awk -F'/' '{ print $2 }' | tr -d '+'`
+FGAS200_VAL=`\grep '^gas_fraction200' ${RES_FINAL} | awk '{ print $2 }'`
+FGAS200_ERR_L=`\grep '^gas_fraction200' ${RES_FINAL} | awk '{ print $3 }' | awk -F'/' '{ print $1 }'`
+FGAS200_ERR_U=`\grep '^gas_fraction200' ${RES_FINAL} | awk '{ print $3 }' | awk -F'/' '{ print $2 }' | tr -d '+'`
[ -z "${R200_VAL}" ] && R200_VAL="null"
[ -z "${R200_ERR_L}" ] && R200_ERR_L="null"
[ -z "${R200_ERR_U}" ] && R200_ERR_U="null"
@@ -336,20 +350,20 @@ FGAS200_ERR_U=`grep '^gas_fraction200' ${RES_FINAL} | awk '{ print $3 }' | awk -
[ -z "${FGAS200_ERR_U}" ] && FGAS200_ERR_U="null"
# 200 }}}
# 500 data {{{
-R500_VAL=`grep '^r500' ${RES_FINAL} | awk '{ print $2 }'`
-R500_ERR_L=`grep '^r500' ${RES_FINAL} | awk '{ print $3 }' | awk -F'/' '{ print $1 }'`
-R500_ERR_U=`grep '^r500' ${RES_FINAL} | awk '{ print $3 }' | awk -F'/' '{ print $2 }' | tr -d '+'`
-M500_VAL=`grep '^m500' ${RES_FINAL} | awk '{ print $2 }'`
-M500_ERR_L=`grep '^m500' ${RES_FINAL} | awk '{ print $3 }' | awk -F'/' '{ print $1 }'`
-M500_ERR_U=`grep '^m500' ${RES_FINAL} | awk '{ print $3 }' | awk -F'/' '{ print $2 }' | tr -d '+'`
-L500_VAL=`grep '^L500' ${RES_FINAL} | awk '{ print $2 }'`
-L500_ERR=`grep '^L500' ${RES_FINAL} | awk '{ print $4 }'`
-MGAS500_VAL=`grep '^gas_m500' ${RES_FINAL} | awk '{ print $2 }'`
-MGAS500_ERR_L=`grep '^gas_m500' ${RES_FINAL} | awk '{ print $3 }' | awk -F'/' '{ print $1 }'`
-MGAS500_ERR_U=`grep '^gas_m500' ${RES_FINAL} | awk '{ print $3 }' | awk -F'/' '{ print $2 }' | tr -d '+'`
-FGAS500_VAL=`grep '^gas_fraction500' ${RES_FINAL} | awk '{ print $2 }'`
-FGAS500_ERR_L=`grep '^gas_fraction500' ${RES_FINAL} | awk '{ print $3 }' | awk -F'/' '{ print $1 }'`
-FGAS500_ERR_U=`grep '^gas_fraction500' ${RES_FINAL} | awk '{ print $3 }' | awk -F'/' '{ print $2 }' | tr -d '+'`
+R500_VAL=`\grep '^r500' ${RES_FINAL} | awk '{ print $2 }'`
+R500_ERR_L=`\grep '^r500' ${RES_FINAL} | awk '{ print $3 }' | awk -F'/' '{ print $1 }'`
+R500_ERR_U=`\grep '^r500' ${RES_FINAL} | awk '{ print $3 }' | awk -F'/' '{ print $2 }' | tr -d '+'`
+M500_VAL=`\grep '^m500' ${RES_FINAL} | awk '{ print $2 }'`
+M500_ERR_L=`\grep '^m500' ${RES_FINAL} | awk '{ print $3 }' | awk -F'/' '{ print $1 }'`
+M500_ERR_U=`\grep '^m500' ${RES_FINAL} | awk '{ print $3 }' | awk -F'/' '{ print $2 }' | tr -d '+'`
+L500_VAL=`\grep '^L500' ${RES_FINAL} | awk '{ print $2 }'`
+L500_ERR=`\grep '^L500' ${RES_FINAL} | awk '{ print $4 }'`
+MGAS500_VAL=`\grep '^gas_m500' ${RES_FINAL} | awk '{ print $2 }'`
+MGAS500_ERR_L=`\grep '^gas_m500' ${RES_FINAL} | awk '{ print $3 }' | awk -F'/' '{ print $1 }'`
+MGAS500_ERR_U=`\grep '^gas_m500' ${RES_FINAL} | awk '{ print $3 }' | awk -F'/' '{ print $2 }' | tr -d '+'`
+FGAS500_VAL=`\grep '^gas_fraction500' ${RES_FINAL} | awk '{ print $2 }'`
+FGAS500_ERR_L=`\grep '^gas_fraction500' ${RES_FINAL} | awk '{ print $3 }' | awk -F'/' '{ print $1 }'`
+FGAS500_ERR_U=`\grep '^gas_fraction500' ${RES_FINAL} | awk '{ print $3 }' | awk -F'/' '{ print $2 }' | tr -d '+'`
[ -z "${R500_VAL}" ] && R500_VAL="null"
[ -z "${R500_ERR_L}" ] && R500_ERR_L="null"
[ -z "${R500_ERR_U}" ] && R500_ERR_U="null"
@@ -366,20 +380,20 @@ FGAS500_ERR_U=`grep '^gas_fraction500' ${RES_FINAL} | awk '{ print $3 }' | awk -
[ -z "${FGAS500_ERR_U}" ] && FGAS500_ERR_U="null"
# 500 }}}
# 1500 data {{{
-R1500_VAL=`grep '^r1500' ${RES_FINAL} | awk '{ print $2 }'`
-R1500_ERR_L=`grep '^r1500' ${RES_FINAL} | awk '{ print $3 }' | awk -F'/' '{ print $1 }'`
-R1500_ERR_U=`grep '^r1500' ${RES_FINAL} | awk '{ print $3 }' | awk -F'/' '{ print $2 }' | tr -d '+'`
-M1500_VAL=`grep '^m1500' ${RES_FINAL} | awk '{ print $2 }'`
-M1500_ERR_L=`grep '^m1500' ${RES_FINAL} | awk '{ print $3 }' | awk -F'/' '{ print $1 }'`
-M1500_ERR_U=`grep '^m1500' ${RES_FINAL} | awk '{ print $3 }' | awk -F'/' '{ print $2 }' | tr -d '+'`
-L1500_VAL=`grep '^L1500' ${RES_FINAL} | awk '{ print $2 }'`
-L1500_ERR=`grep '^L1500' ${RES_FINAL} | awk '{ print $4 }'`
-MGAS1500_VAL=`grep '^gas_m1500' ${RES_FINAL} | awk '{ print $2 }'`
-MGAS1500_ERR_L=`grep '^gas_m1500' ${RES_FINAL} | awk '{ print $3 }' | awk -F'/' '{ print $1 }'`
-MGAS1500_ERR_U=`grep '^gas_m1500' ${RES_FINAL} | awk '{ print $3 }' | awk -F'/' '{ print $2 }' | tr -d '+'`
-FGAS1500_VAL=`grep '^gas_fraction1500' ${RES_FINAL} | awk '{ print $2 }'`
-FGAS1500_ERR_L=`grep '^gas_fraction1500' ${RES_FINAL} | awk '{ print $3 }' | awk -F'/' '{ print $1 }'`
-FGAS1500_ERR_U=`grep '^gas_fraction1500' ${RES_FINAL} | awk '{ print $3 }' | awk -F'/' '{ print $2 }' | tr -d '+'`
+R1500_VAL=`\grep '^r1500' ${RES_FINAL} | awk '{ print $2 }'`
+R1500_ERR_L=`\grep '^r1500' ${RES_FINAL} | awk '{ print $3 }' | awk -F'/' '{ print $1 }'`
+R1500_ERR_U=`\grep '^r1500' ${RES_FINAL} | awk '{ print $3 }' | awk -F'/' '{ print $2 }' | tr -d '+'`
+M1500_VAL=`\grep '^m1500' ${RES_FINAL} | awk '{ print $2 }'`
+M1500_ERR_L=`\grep '^m1500' ${RES_FINAL} | awk '{ print $3 }' | awk -F'/' '{ print $1 }'`
+M1500_ERR_U=`\grep '^m1500' ${RES_FINAL} | awk '{ print $3 }' | awk -F'/' '{ print $2 }' | tr -d '+'`
+L1500_VAL=`\grep '^L1500' ${RES_FINAL} | awk '{ print $2 }'`
+L1500_ERR=`\grep '^L1500' ${RES_FINAL} | awk '{ print $4 }'`
+MGAS1500_VAL=`\grep '^gas_m1500' ${RES_FINAL} | awk '{ print $2 }'`
+MGAS1500_ERR_L=`\grep '^gas_m1500' ${RES_FINAL} | awk '{ print $3 }' | awk -F'/' '{ print $1 }'`
+MGAS1500_ERR_U=`\grep '^gas_m1500' ${RES_FINAL} | awk '{ print $3 }' | awk -F'/' '{ print $2 }' | tr -d '+'`
+FGAS1500_VAL=`\grep '^gas_fraction1500' ${RES_FINAL} | awk '{ print $2 }'`
+FGAS1500_ERR_L=`\grep '^gas_fraction1500' ${RES_FINAL} | awk '{ print $3 }' | awk -F'/' '{ print $1 }'`
+FGAS1500_ERR_U=`\grep '^gas_fraction1500' ${RES_FINAL} | awk '{ print $3 }' | awk -F'/' '{ print $2 }' | tr -d '+'`
[ -z "${R1500_VAL}" ] && R1500_VAL="null"
[ -z "${R1500_ERR_L}" ] && R1500_ERR_L="null"
[ -z "${R1500_ERR_U}" ] && R1500_ERR_U="null"
@@ -396,20 +410,20 @@ FGAS1500_ERR_U=`grep '^gas_fraction1500' ${RES_FINAL} | awk '{ print $3 }' | awk
[ -z "${FGAS1500_ERR_U}" ] && FGAS1500_ERR_U="null"
# 1500 }}}
# 2500 data {{{
-R2500_VAL=`grep '^r2500' ${RES_FINAL} | awk '{ print $2 }'`
-R2500_ERR_L=`grep '^r2500' ${RES_FINAL} | awk '{ print $3 }' | awk -F'/' '{ print $1 }'`
-R2500_ERR_U=`grep '^r2500' ${RES_FINAL} | awk '{ print $3 }' | awk -F'/' '{ print $2 }' | tr -d '+'`
-M2500_VAL=`grep '^m2500' ${RES_FINAL} | awk '{ print $2 }'`
-M2500_ERR_L=`grep '^m2500' ${RES_FINAL} | awk '{ print $3 }' | awk -F'/' '{ print $1 }'`
-M2500_ERR_U=`grep '^m2500' ${RES_FINAL} | awk '{ print $3 }' | awk -F'/' '{ print $2 }' | tr -d '+'`
-L2500_VAL=`grep '^L2500' ${RES_FINAL} | awk '{ print $2 }'`
-L2500_ERR=`grep '^L2500' ${RES_FINAL} | awk '{ print $4 }'`
-MGAS2500_VAL=`grep '^gas_m2500' ${RES_FINAL} | awk '{ print $2 }'`
-MGAS2500_ERR_L=`grep '^gas_m2500' ${RES_FINAL} | awk '{ print $3 }' | awk -F'/' '{ print $1 }'`
-MGAS2500_ERR_U=`grep '^gas_m2500' ${RES_FINAL} | awk '{ print $3 }' | awk -F'/' '{ print $2 }' | tr -d '+'`
-FGAS2500_VAL=`grep '^gas_fraction2500' ${RES_FINAL} | awk '{ print $2 }'`
-FGAS2500_ERR_L=`grep '^gas_fraction2500' ${RES_FINAL} | awk '{ print $3 }' | awk -F'/' '{ print $1 }'`
-FGAS2500_ERR_U=`grep '^gas_fraction2500' ${RES_FINAL} | awk '{ print $3 }' | awk -F'/' '{ print $2 }' | tr -d '+'`
+R2500_VAL=`\grep '^r2500' ${RES_FINAL} | awk '{ print $2 }'`
+R2500_ERR_L=`\grep '^r2500' ${RES_FINAL} | awk '{ print $3 }' | awk -F'/' '{ print $1 }'`
+R2500_ERR_U=`\grep '^r2500' ${RES_FINAL} | awk '{ print $3 }' | awk -F'/' '{ print $2 }' | tr -d '+'`
+M2500_VAL=`\grep '^m2500' ${RES_FINAL} | awk '{ print $2 }'`
+M2500_ERR_L=`\grep '^m2500' ${RES_FINAL} | awk '{ print $3 }' | awk -F'/' '{ print $1 }'`
+M2500_ERR_U=`\grep '^m2500' ${RES_FINAL} | awk '{ print $3 }' | awk -F'/' '{ print $2 }' | tr -d '+'`
+L2500_VAL=`\grep '^L2500' ${RES_FINAL} | awk '{ print $2 }'`
+L2500_ERR=`\grep '^L2500' ${RES_FINAL} | awk '{ print $4 }'`
+MGAS2500_VAL=`\grep '^gas_m2500' ${RES_FINAL} | awk '{ print $2 }'`
+MGAS2500_ERR_L=`\grep '^gas_m2500' ${RES_FINAL} | awk '{ print $3 }' | awk -F'/' '{ print $1 }'`
+MGAS2500_ERR_U=`\grep '^gas_m2500' ${RES_FINAL} | awk '{ print $3 }' | awk -F'/' '{ print $2 }' | tr -d '+'`
+FGAS2500_VAL=`\grep '^gas_fraction2500' ${RES_FINAL} | awk '{ print $2 }'`
+FGAS2500_ERR_L=`\grep '^gas_fraction2500' ${RES_FINAL} | awk '{ print $3 }' | awk -F'/' '{ print $1 }'`
+FGAS2500_ERR_U=`\grep '^gas_fraction2500' ${RES_FINAL} | awk '{ print $3 }' | awk -F'/' '{ print $2 }' | tr -d '+'`
[ -z "${R2500_VAL}" ] && R2500_VAL="null"
[ -z "${R2500_ERR_L}" ] && R2500_ERR_L="null"
[ -z "${R2500_ERR_U}" ] && R2500_ERR_U="null"
@@ -425,17 +439,17 @@ FGAS2500_ERR_U=`grep '^gas_fraction2500' ${RES_FINAL} | awk '{ print $3 }' | awk
[ -z "${FGAS2500_ERR_L}" ] && FGAS2500_ERR_L="null"
[ -z "${FGAS2500_ERR_U}" ] && FGAS2500_ERR_U="null"
# 2500 }}}
-FGRR=`grep '^gas_fraction.*r2500.*r500=' ${RES_FINAL} | sed 's/^.*r500=//' | awk '{ print $1 }'`
-FGRR_ERR_L=`grep '^gas_fraction.*r2500.*r500=' ${RES_FINAL} | sed 's/^.*r500=//' | awk '{ print $2 }' | awk -F'/' '{ print $1 }'`
-FGRR_ERR_U=`grep '^gas_fraction.*r2500.*r500=' ${RES_FINAL} | sed 's/^.*r500=//' | awk '{ print $2 }' | awk -F'/' '{ print $2 }' | tr -d '+'`
+FGRR=`\grep '^gas_fraction.*r2500.*r500=' ${RES_FINAL} | sed 's/^.*r500=//' | awk '{ print $1 }'`
+FGRR_ERR_L=`\grep '^gas_fraction.*r2500.*r500=' ${RES_FINAL} | sed 's/^.*r500=//' | awk '{ print $2 }' | awk -F'/' '{ print $1 }'`
+FGRR_ERR_U=`\grep '^gas_fraction.*r2500.*r500=' ${RES_FINAL} | sed 's/^.*r500=//' | awk '{ print $2 }' | awk -F'/' '{ print $2 }' | tr -d '+'`
[ -z "${FGRR}" ] && FGRR="null"
[ -z "${FGRR_ERR_L}" ] && FGRR_ERR_L="null"
[ -z "${FGRR_ERR_U}" ] && FGRR_ERR_U="null"
## mrl }}}
## rcool & cooling time {{{
-RCOOL=`grep '^cooling' ${RES_FINAL} | awk '{ print $4 }'`
-COOLING_TIME=`grep '^cooling' ${RES_FINAL} | awk -F'=' '{ print $2 }' | tr -d ' Gyr'`
+RCOOL=`\grep '^cooling' ${RES_FINAL} | awk '{ print $4 }'`
+COOLING_TIME=`\grep '^cooling' ${RES_FINAL} | awk -F'=' '{ print $2 }' | tr -d ' Gyr'`
[ -z "${RCOOL}" ] && RCOOL="null"
[ -z "${COOLING_TIME}" ] && COOLING_TIME="null"
## cooling time }}}
@@ -444,35 +458,35 @@ COOLING_TIME=`grep '^cooling' ${RES_FINAL} | awk -F'=' '{ print $2 }' | tr -d '
cd ${BASEDIR}
## orig json file {{{
printf "## collect data from original info file ...\n"
-OBJ_NAME=`grep '"Source\ Name' ${JSON_FILE} | sed 's/.*"Source.*":\ //' | sed 's/^"//' | sed 's/"\ *,$//'`
-OBJ_UNAME=`grep '"Unified\ Name' ${JSON_FILE} | sed 's/.*"Unified.*":\ //' | sed 's/^"//' | sed 's/"\ *,$//'`
-OBJ_RA=`grep '"R\.\ A\.' ${JSON_FILE} | sed 's/.*"R\.\ A\.":\ //' | sed 's/^"//' | sed 's/"\ *,$//'`
-OBJ_DEC=`grep '"Dec\.' ${JSON_FILE} | sed 's/.*"Dec\.":\ //' | sed 's/^"//' | sed 's/"\ *,$//'`
-OBJ_XCRA=`grep '"XCNTRD_RA' ${JSON_FILE} | sed 's/.*"XCNTRD_RA":\ //' | sed 's/^"//' | sed 's/"\ *,$//'`
-OBJ_XCDEC=`grep '"XCNTRD_DEC' ${JSON_FILE} | sed 's/.*"XCNTRD_DEC":\ //' | sed 's/^"//' | sed 's/"\ *,$//'`
-REDSHIFT=`grep '"redshift' ${JSON_FILE} | sed 's/.*"redshift.*":\ //' | sed 's/\ *,$//'`
-COOLCORE=`grep -i '"cool.*core' ${JSON_FILE} | sed 's/.*"[cC]ool.*":\ //' | sed 's/\ *,$//'`
+OBJ_NAME=`\grep '"Source\ Name' ${JSON_FILE} | sed 's/.*"Source.*":\ //' | sed 's/^"//' | sed 's/"\ *,$//'`
+OBJ_UNAME=`\grep '"Unified\ Name' ${JSON_FILE} | sed 's/.*"Unified.*":\ //' | sed 's/^"//' | sed 's/"\ *,$//'`
+OBJ_RA=`\grep '"R\.\ A\.' ${JSON_FILE} | sed 's/.*"R\.\ A\.":\ //' | sed 's/^"//' | sed 's/"\ *,$//'`
+OBJ_DEC=`\grep '"Dec\.' ${JSON_FILE} | sed 's/.*"Dec\.":\ //' | sed 's/^"//' | sed 's/"\ *,$//'`
+OBJ_XCRA=`\grep '"XCNTRD_RA' ${JSON_FILE} | sed 's/.*"XCNTRD_RA":\ //' | sed 's/^"//' | sed 's/"\ *,$//'`
+OBJ_XCDEC=`\grep '"XCNTRD_DEC' ${JSON_FILE} | sed 's/.*"XCNTRD_DEC":\ //' | sed 's/^"//' | sed 's/"\ *,$//'`
+REDSHIFT=`\grep '"redshift' ${JSON_FILE} | sed 's/.*"redshift.*":\ //' | sed 's/\ *,$//'`
+COOLCORE=`\grep -i '"cool.*core' ${JSON_FILE} | sed 's/.*"[cC]ool.*":\ //' | sed 's/\ *,$//'`
[ -z "${COOLCORE}" ] && COOLCORE="null"
-OBJ_FEATURE=`grep '"Feature' ${JSON_FILE} | sed 's/.*"Feature":\ //' | sed 's/^"//' | sed 's/"\ *,*$//'`
-OBJ_NOTE=`grep '"NOTE' ${JSON_FILE} | sed 's/.*"NOTE":\ //' | sed 's/^"//' | sed 's/"\ *,*$//'`
+OBJ_FEATURE=`\grep '"Feature' ${JSON_FILE} | sed 's/.*"Feature":\ //' | sed 's/^"//' | sed 's/"\ *,*$//'`
+OBJ_NOTE=`\grep '"NOTE' ${JSON_FILE} | sed 's/.*"NOTE":\ //' | sed 's/^"//' | sed 's/"\ *,*$//'`
# T & Z {{{
-T_1R500=`grep '"T(0\.1.*R500' ${JSON_FILE} | sed 's/.*"T.*":\ //' | sed 's/\ *,$//'`
-T_1ERR=`grep '"T_err(.*0\.1.*R500' ${JSON_FILE} | sed 's/.*"T.*":\ //' | sed 's/\ *,$//'`
-T_1ERR_L=`grep '"T_err_l.*0\.1.*R500' ${JSON_FILE} | sed 's/.*"T.*":\ //' | sed 's/\ *,$//'`
-T_1ERR_U=`grep '"T_err_u.*0\.1.*R500' ${JSON_FILE} | sed 's/.*"T.*":\ //' | sed 's/\ *,$//'`
-Z_1R500=`grep '"Z(0\.1.*R500' ${JSON_FILE} | sed 's/.*"Z.*":\ //' | sed 's/\ *,$//'`
-Z_1ERR=`grep '"Z_err(.*0\.1.*R500' ${JSON_FILE} | sed 's/.*"Z.*":\ //' | sed 's/\ *,$//'`
-Z_1ERR_L=`grep '"Z_err_l.*0\.1.*R500' ${JSON_FILE} | sed 's/.*"Z.*":\ //' | sed 's/\ *,$//'`
-Z_1ERR_U=`grep '"Z_err_u.*0\.1.*R500' ${JSON_FILE} | sed 's/.*"Z.*":\ //' | sed 's/\ *,$//'`
-T_2R500=`grep '"T(0\.2.*R500' ${JSON_FILE} | sed 's/.*"T.*":\ //' | sed 's/\ *,$//'`
-T_2ERR=`grep '"T_err(.*0\.2.*R500' ${JSON_FILE} | sed 's/.*"T.*":\ //' | sed 's/\ *,$//'`
-T_2ERR_L=`grep '"T_err_l.*0\.2.*R500' ${JSON_FILE} | sed 's/.*"T.*":\ //' | sed 's/\ *,$//'`
-T_2ERR_U=`grep '"T_err_u.*0\.2.*R500' ${JSON_FILE} | sed 's/.*"T.*":\ //' | sed 's/\ *,$//'`
-Z_2R500=`grep '"Z(0\.2.*R500' ${JSON_FILE} | sed 's/.*"Z.*":\ //' | sed 's/\ *,$//'`
-Z_2ERR=`grep '"Z_err(.*0\.2.*R500' ${JSON_FILE} | sed 's/.*"Z.*":\ //' | sed 's/\ *,$//'`
-Z_2ERR_L=`grep '"Z_err_l.*0\.2.*R500' ${JSON_FILE} | sed 's/.*"Z.*":\ //' | sed 's/\ *,$//'`
-Z_2ERR_U=`grep '"Z_err_u.*0\.2.*R500' ${JSON_FILE} | sed 's/.*"Z.*":\ //' | sed 's/\ *,$//'`
+T_1R500=`\grep '"T(0\.1.*R500' ${JSON_FILE} | sed 's/.*"T.*":\ //' | sed 's/\ *,$//'`
+T_1ERR=`\grep '"T_err(.*0\.1.*R500' ${JSON_FILE} | sed 's/.*"T.*":\ //' | sed 's/\ *,$//'`
+T_1ERR_L=`\grep '"T_err_l.*0\.1.*R500' ${JSON_FILE} | sed 's/.*"T.*":\ //' | sed 's/\ *,$//'`
+T_1ERR_U=`\grep '"T_err_u.*0\.1.*R500' ${JSON_FILE} | sed 's/.*"T.*":\ //' | sed 's/\ *,$//'`
+Z_1R500=`\grep '"Z(0\.1.*R500' ${JSON_FILE} | sed 's/.*"Z.*":\ //' | sed 's/\ *,$//'`
+Z_1ERR=`\grep '"Z_err(.*0\.1.*R500' ${JSON_FILE} | sed 's/.*"Z.*":\ //' | sed 's/\ *,$//'`
+Z_1ERR_L=`\grep '"Z_err_l.*0\.1.*R500' ${JSON_FILE} | sed 's/.*"Z.*":\ //' | sed 's/\ *,$//'`
+Z_1ERR_U=`\grep '"Z_err_u.*0\.1.*R500' ${JSON_FILE} | sed 's/.*"Z.*":\ //' | sed 's/\ *,$//'`
+T_2R500=`\grep '"T(0\.2.*R500' ${JSON_FILE} | sed 's/.*"T.*":\ //' | sed 's/\ *,$//'`
+T_2ERR=`\grep '"T_err(.*0\.2.*R500' ${JSON_FILE} | sed 's/.*"T.*":\ //' | sed 's/\ *,$//'`
+T_2ERR_L=`\grep '"T_err_l.*0\.2.*R500' ${JSON_FILE} | sed 's/.*"T.*":\ //' | sed 's/\ *,$//'`
+T_2ERR_U=`\grep '"T_err_u.*0\.2.*R500' ${JSON_FILE} | sed 's/.*"T.*":\ //' | sed 's/\ *,$//'`
+Z_2R500=`\grep '"Z(0\.2.*R500' ${JSON_FILE} | sed 's/.*"Z.*":\ //' | sed 's/\ *,$//'`
+Z_2ERR=`\grep '"Z_err(.*0\.2.*R500' ${JSON_FILE} | sed 's/.*"Z.*":\ //' | sed 's/\ *,$//'`
+Z_2ERR_L=`\grep '"Z_err_l.*0\.2.*R500' ${JSON_FILE} | sed 's/.*"Z.*":\ //' | sed 's/\ *,$//'`
+Z_2ERR_U=`\grep '"Z_err_u.*0\.2.*R500' ${JSON_FILE} | sed 's/.*"Z.*":\ //' | sed 's/\ *,$//'`
[ -z "${T_1R500}" ] && T_1R500="null"
[ -z "${T_1ERR}" ] && T_1ERR="null"
[ -z "${T_1ERR_L}" ] && T_1ERR_L="null"
diff --git a/scripts/chandra_gensbpreg.sh b/scripts/chandra_gensbpreg.sh
index ecc1fe9..9a2014e 100755
--- a/scripts/chandra_gensbpreg.sh
+++ b/scripts/chandra_gensbpreg.sh
@@ -14,7 +14,13 @@
## Author: Zhenghao ZHU
## Created: ??? (TODO)
##
+UPDATED="2015/06/03"
## ChangeLogs:
+## v3.0, 2015/06/03, Aaron LI
+## * Copy needed pfiles to current working directory, and
+## set environment variable $PFILES to use these first.
+## * Added missing punlearn
+## * Removed the section of dmlist.par & dmextract.par deletion
## v2.1, 2015/02/13, Weitian LI
## * added '1' to denominators when calculate STN to avoid division by zero
## * added script description
@@ -22,7 +28,6 @@
## * added the parameter `-t' to print STN results for testing
##
-UPDATED="2015/02/13"
# minimal counts
CNT_MIN=50
@@ -35,11 +40,26 @@ CH_HI=479
CH_BKG_LOW=651
CH_BKG_HI=822
+## prepare parameter files (pfiles) {{{
+CIAO_TOOLS="dmstat dmlist dmextract"
+
+# Copy necessary pfiles for localized usage
+for tool in ${CIAO_TOOLS}; do
+ pfile=`paccess ${tool}`
+ [ -n "${pfile}" ] && punlearn ${tool} && cp -Lvf ${pfile} .
+done
+
+# Modify environment variable 'PFILES' to use local pfiles first
+export PFILES="./:${PFILES}"
+## pfiles }}}
+
if [ $# -lt 6 ]; then
printf "usage:\n"
printf " `basename $0` <evt> <evt_e> <x> <y> <bkg_pi> <reg_out>\n"
printf " `basename $0` -t <evt> <evt_e> <x> <y> <bkg_pi> <rin1> ...\n"
+ printf "updated:\n"
+ printf " ${UPDATED}\n"
exit 1
fi
@@ -59,10 +79,11 @@ if [ "x$1" = "x-t" ] && [ $# -ge 7 ]; then
TMP_SPC="_tmpspc.pi"
punlearn dmextract
dmextract infile="${EVT}[sky=${TMP_REG}][bin pi]" outfile=${TMP_SPC} wmap="[energy=300:12000][bin det=8]" clobber=yes
- INDEX_SRC=`dmstat "${TMP_SPC}[channel=${CH_BKG_LOW}:${CH_BKG_HI}][cols counts]" | grep "sum:" | awk '{print $2}'`
- INDEX_BKG=`dmstat "${BKGSPC}[channel=${CH_BKG_LOW}:${CH_BKG_HI}][cols counts]" | grep "sum:" | awk '{print $2}'`
- COUNT_SRC=`dmstat "${TMP_SPC}[channel=${CH_LOW}:${CH_HI}][cols counts]" | grep "sum:" | awk '{print $2}'`
- COUNT_BKG=`dmstat "${BKGSPC}[channel=${CH_LOW}:${CH_HI}][cols counts]" | grep "sum:" | awk '{print $2}'`
+ punlearn dmstat
+ INDEX_SRC=`dmstat "${TMP_SPC}[channel=${CH_BKG_LOW}:${CH_BKG_HI}][cols counts]" | \grep "sum:" | awk '{print $2}'`
+ INDEX_BKG=`dmstat "${BKGSPC}[channel=${CH_BKG_LOW}:${CH_BKG_HI}][cols counts]" | \grep "sum:" | awk '{print $2}'`
+ COUNT_SRC=`dmstat "${TMP_SPC}[channel=${CH_LOW}:${CH_HI}][cols counts]" | \grep "sum:" | awk '{print $2}'`
+ COUNT_BKG=`dmstat "${BKGSPC}[channel=${CH_LOW}:${CH_HI}][cols counts]" | \grep "sum:" | awk '{print $2}'`
# echo "CNT_SRC: ${COUNT_SRC}, IDX_SRC: ${INDEX_SRC}, CNT_BKG: ${COUNT_BKG}, IDX_BKG: ${INDEX_BKG}"
# exit
STN=`echo ${COUNT_SRC} ${INDEX_SRC} ${COUNT_BKG} ${INDEX_BKG} | awk '{ printf("%f",$1/$2/$3*$4) }'`
@@ -88,13 +109,6 @@ echo "Y: ${Y}"
echo "BKGSPC: ${BKGSPC}"
echo ""
-###
-printf "## remove dmlist.par dmextract.par\n"
-DMLIST_PAR="$HOME/cxcds_param4/dmlist.par"
-DMEXTRACT_PAR="$HOME/cxcds_param4/dmextract.par"
-[ -f "${DMLIST_PAR}" ] && rm -f ${DMLIST_PAR}
-[ -f "${DMEXTRACT_PAR}" ] && rm -f ${DMEXTRACT_PAR}
-
RIN=0
ROUT=0
CNTS=0
@@ -102,11 +116,13 @@ for i in `seq 1 10`; do
printf "gen reg #$i @ cnts:${CNT_MIN} ...\n"
ROUT=`expr $RIN + 5`
TMP_REG="pie($X,$Y,$RIN,$ROUT,0,360)"
- CNTS=`dmlist "${EVT_E}[sky=${TMP_REG}]" blocks | grep 'EVENTS' | awk '{ print $8 }'`
+ punlearn dmlist
+ CNTS=`dmlist "${EVT_E}[sky=${TMP_REG}]" blocks | \grep 'EVENTS' | awk '{ print $8 }'`
while [ `echo "$CNTS < $CNT_MIN" | bc -l` -eq 1 ]; do
ROUT=`expr $ROUT + 1`
TMP_REG="pie($X,$Y,$RIN,$ROUT,0,360)"
- CNTS=`dmlist "${EVT_E}[sky=${TMP_REG}]" blocks | grep 'EVENTS' | awk '{ print $8 }'`
+ punlearn dmlist
+ CNTS=`dmlist "${EVT_E}[sky=${TMP_REG}]" blocks | \grep 'EVENTS' | awk '{ print $8 }'`
done
TMP_REG="pie($X,$Y,$RIN,$ROUT,0,360)"
echo "${TMP_REG}" >> ${REG_OUT}
@@ -134,18 +150,20 @@ while [ `echo "${STN} > 1.5" | bc -l` -eq 1 ]; do
TMP_SPC=_tmpspc.pi
punlearn dmextract
dmextract infile="${EVT}[sky=${TMP_REG}][bin pi]" outfile=${TMP_SPC} wmap="[energy=300:12000][bin det=8]" clobber=yes
- INDEX_SRC=`dmstat "${TMP_SPC}[channel=${CH_BKG_LOW}:${CH_BKG_HI}][cols counts]" | grep "sum:" | awk '{print $2}'`
- INDEX_BKG=`dmstat "${BKGSPC}[channel=${CH_BKG_LOW}:${CH_BKG_HI}][cols counts]" | grep "sum:" | awk '{print $2}'`
+ punlearn dmstat
+ INDEX_SRC=`dmstat "${TMP_SPC}[channel=${CH_BKG_LOW}:${CH_BKG_HI}][cols counts]" | \grep "sum:" | awk '{print $2}'`
+ INDEX_BKG=`dmstat "${BKGSPC}[channel=${CH_BKG_LOW}:${CH_BKG_HI}][cols counts]" | \grep "sum:" | awk '{print $2}'`
- COUNT_SRC=`dmstat "${TMP_SPC}[channel=${CH_LOW}:${CH_HI}][cols counts]" | grep "sum:" | awk '{print $2}'`
- COUNT_BKG=`dmstat "${BKGSPC}[channel=${CH_LOW}:${CH_HI}][cols counts]" | grep "sum:" | awk '{print $2}'`
+ COUNT_SRC=`dmstat "${TMP_SPC}[channel=${CH_LOW}:${CH_HI}][cols counts]" | \grep "sum:" | awk '{print $2}'`
+ COUNT_BKG=`dmstat "${BKGSPC}[channel=${CH_LOW}:${CH_HI}][cols counts]" | \grep "sum:" | awk '{print $2}'`
#echo "CNT_SRC: ${COUNT_SRC}, IDX_SRC: ${INDEX_SRC}, CNT_BKG: ${COUNT_BKG}, IDX_BKG: ${INDEX_BKG}"
#exit 99
# Add '1' to the denominators to avoid division by zero.
STN=`echo ${COUNT_SRC} ${INDEX_SRC} ${COUNT_BKG} ${INDEX_BKG} | awk '{ printf("%f", ($1 / ($2 + 1)) / ($3 / ($4 + 1))) }'`
- CNT=`dmlist "${EVT_E}[sky=${TMP_REG}]" blocks | grep 'EVENTS' | awk '{ print $8 }'`
+ punlearn dmlist
+ CNT=`dmlist "${EVT_E}[sky=${TMP_REG}]" blocks | \grep 'EVENTS' | awk '{ print $8 }'`
echo "CNT: ${CNT}"
echo "CNT_MIN: ${CNT_MIN}"
if [ `echo "${CNT} < ${CNT_MIN}" | bc -l` -eq 1 ]; then
diff --git a/scripts/chandra_genspcreg.sh b/scripts/chandra_genspcreg.sh
index 5099136..2c0a8d3 100755
--- a/scripts/chandra_genspcreg.sh
+++ b/scripts/chandra_genspcreg.sh
@@ -1,9 +1,42 @@
#!/bin/sh
+##
+## This script generates a series of regions for the extraction of
+## radial surface brightness profile (SBP).
+##
+## Regions generation algorithm:
+## (TODO)
+##
+## Author: Zhenghao ZHU
+## Created: ???
+##
+UPDATED="2015/06/03"
+## ChangeLogs:
+## v2.0, 2015/06/03, Aaron LI
+## * Copy needed pfiles to current working directory, and
+## set environment variable $PFILES to use these first.
+## * Added missing punlearn
+## * Removed the section of dmlist.par & dmextract.par deletion
+##
+
+## prepare parameter files (pfiles) {{{
+CIAO_TOOLS="dmstat dmlist dmextract"
+
+# Copy necessary pfiles for localized usage
+for tool in ${CIAO_TOOLS}; do
+ pfile=`paccess ${tool}`
+ [ -n "${pfile}" ] && punlearn ${tool} && cp -Lvf ${pfile} .
+done
+
+# Modify environment variable 'PFILES' to use local pfiles first
+export PFILES="./:${PFILES}"
+## pfiles }}}
if [ $# -ne 6 ] ; then
- printf "usage:\n"
- printf " `basename $0` <evt> <evt_e> <bkg_pi> <x> <y> <reg_out>\n"
- exit 1
+ printf "usage:\n"
+ printf " `basename $0` <evt> <evt_e> <bkg_pi> <x> <y> <reg_out>\n"
+ printf "updated:\n"
+ printf " ${UPDATED}\n"
+ exit 1
fi
EVT=$1
@@ -21,13 +54,6 @@ echo "X: ${X}"
echo "Y: ${Y}"
echo ""
-###
-printf "## remove dmlist.par dmextract.par\n"
-DMLIST_PAR="$HOME/cxcds_param4/dmlist.par"
-DMEXTRACT_PAR="$HOME/cxcds_param4/dmextract.par"
-[ -f "${DMLIST_PAR}" ] && rm -fv ${DMLIST_PAR}
-[ -f "${DMEXTRACT_PAR}" ] && rm -fv ${DMEXTRACT_PAR}
-
#min counts
CNT_MIN=2500
#singal to noise
@@ -49,7 +75,6 @@ STN_FILE="spc_stn.dat"
[ -e ${STN_FILE} ] && mv -fv ${STN_FILE} ${STN_FILE}_bak
i=0
while [ `echo "$STN > 2 "| bc -l` -eq 1 ] ; do
- ## LIweitiaNux
if [ `echo "$ROUT > $ROUT_MAX" | bc -l` -eq 1 ]; then
break
fi
@@ -64,23 +89,26 @@ while [ `echo "$STN > 2 "| bc -l` -eq 1 ] ; do
ROUT=5
fi
TMP_REG="pie($X,$Y,$RIN,$ROUT,0,360)"
- CNTS=`dmlist "${EVT_E}[sky=${TMP_REG}]" blocks | grep 'EVENTS' | awk '{print $8}'`
+ punlearn dmlist
+ CNTS=`dmlist "${EVT_E}[sky=${TMP_REG}]" blocks | \grep 'EVENTS' | awk '{print $8}'`
while [ ${CNTS} -lt ${CNT_MIN} ]; do
ROUT=`expr $ROUT + 1 `
if [ `echo "$ROUT > $ROUT_MAX" | bc -l` -eq 1 ]; then
break
fi
TMP_REG="pie($X,$Y,$RIN,$ROUT,0,360)"
- CNTS=`dmlist "${EVT_E}[sky=${TMP_REG}]" blocks | grep 'EVENTS' | awk '{print $8}'`
+ punlearn dmlist
+ CNTS=`dmlist "${EVT_E}[sky=${TMP_REG}]" blocks | \grep 'EVENTS' | awk '{print $8}'`
done
TMP_SPC=_tmpspc.pi
punlearn dmextract
dmextract infile="${EVT}[sky=${TMP_REG}][bin pi]" outfile=${TMP_SPC} wmap="[energy=300:12000][bin tdet=8]" clobber=yes
- INDEX_SRC=`dmstat "${TMP_SPC}[channel=${CH_BKG_LOW}:${CH_BKG_HI}][cols counts]" | grep "sum:" | awk '{print $2}' `
- INDEX_BKG=`dmstat "${BKGSPC}[channel=${CH_BKG_LOW}:${CH_BKG_HI}][cols counts]" | grep "sum:" | awk '{print $2}' `
+ punlearn dmstat
+ INDEX_SRC=`dmstat "${TMP_SPC}[channel=${CH_BKG_LOW}:${CH_BKG_HI}][cols counts]" | \grep "sum:" | awk '{print $2}' `
+ INDEX_BKG=`dmstat "${BKGSPC}[channel=${CH_BKG_LOW}:${CH_BKG_HI}][cols counts]" | \grep "sum:" | awk '{print $2}' `
- COUNT_SRC=`dmstat "${TMP_SPC}[channel=${CH_LOW}:${CH_HI}][cols counts]" | grep "sum:" | awk '{print $2}' `
- COUNT_BKG=`dmstat "${BKGSPC}[channel=${CH_LOW}:${CH_HI}][cols counts]" | grep "sum:" | awk '{print $2}' `
+ COUNT_SRC=`dmstat "${TMP_SPC}[channel=${CH_LOW}:${CH_HI}][cols counts]" | \grep "sum:" | awk '{print $2}' `
+ COUNT_BKG=`dmstat "${BKGSPC}[channel=${CH_LOW}:${CH_HI}][cols counts]" | \grep "sum:" | awk '{print $2}' `
if [ ${INDEX_SRC} -eq 0 ] ;then
STN=10000
else
@@ -89,7 +117,7 @@ while [ `echo "$STN > 2 "| bc -l` -eq 1 ] ; do
echo " STN: ${STN}"
echo "${STN}" >> "${STN_FILE}"
done
-## LIweitiaNux
+
## fix 'i', to consistent with the actual annuluses
i=`expr $i - 1`
@@ -104,7 +132,8 @@ if [ $i -lt 3 ]; then
elif [ $i -gt 6 ]; then
mv -fv ${REG_OUT} ${REG_OUT}_2500bak
CNTS=0
- CNTS_TOTAL=`dmlist "${EVT_E}[sky=pie($X,$Y,0,$RIN,0,360)]" blocks | grep 'EVENTS' | awk '{print $8}'`
+ punlearn dmlist
+ CNTS_TOTAL=`dmlist "${EVT_E}[sky=pie($X,$Y,0,$RIN,0,360)]" blocks | \grep 'EVENTS' | awk '{print $8}'`
CNTS_USE=`echo "${CNTS_TOTAL} 6" | awk '{printf("%d", $1/$2)}'`
echo "CNT_USE: ${CNT_USE}"
printf "*** too many annulus ***\n"
@@ -119,7 +148,8 @@ elif [ $i -gt 6 ]; then
break
fi
TMP_REG="pie($X,$Y,$RIN,$ROUT,0,360)"
- CNTS=`dmlist "${EVT_E}[sky=${TMP_REG}]" blocks | grep 'EVENTS' | awk '{print $8}'`
+ punlearn dmlist
+ CNTS=`dmlist "${EVT_E}[sky=${TMP_REG}]" blocks | \grep 'EVENTS' | awk '{print $8}'`
done
j=`expr $j + 1 `
echo "${TMP_REG}" >> ${REG_OUT}
diff --git a/scripts/chandra_pb_flux.sh b/scripts/chandra_pb_flux.sh
index d6860f8..59d4418 100755
--- a/scripts/chandra_pb_flux.sh
+++ b/scripts/chandra_pb_flux.sh
@@ -4,25 +4,48 @@
# 9.5-12.0 keV (channel: 651-822)
# PI = [ energy(eV) / 14.6 eV + 1 ]
#
-# LIweitiaNux <liweitianux@gmail.com>
-# July 30, 2012
+# Weitian <liweitianux@gmail.com>
+# 2012/07/30
#
# ChangeLog:
-# v1.1: August 2, 2012
-# fix bugs with scientific notation in `bc'
+# v2.0, 2015/06/03, Aaron LI
+# * Copy needed pfiles to tmp directory,
+# set environment variable $PFILES to use these first,
+# and remove them after usage.
+# v1.1: August 2, 2012
+# * fix bugs with scientific notation in `bc'
#
-if [ $# -eq 0 ]; then
+if [ $# -eq 0 ] || [ "x$1" = "x-h" ]; then
echo "usage:"
echo " `basename $0` <spec> ..."
exit 1
fi
## energy range: 9.5 -- 12.0 keV
+EN_LOW="9.5"
+EN_HI="12.0"
CH_LOW=651
CH_HI=822
-echo "CHANNEL: $CH_LOW -- $CH_HI"
+echo "Energy: $EN_LOW -- $EN_HI (keV)"
+echo "Channel: $CH_LOW -- $CH_HI"
+
+## prepare parameter files (pfiles) {{{
+CIAO_TOOLS="dmstat dmkeypar"
+
+PFILES_TMPDIR="/tmp/pfiles-$$"
+[ -d "${PFILES_TMPDIR}" ] && rm -rf ${PFILES_TMPDIR} || mkdir ${PFILES_TMPDIR}
+
+# Copy necessary pfiles for localized usage
+for tool in ${CIAO_TOOLS}; do
+ pfile=`paccess ${tool}`
+ [ -n "${pfile}" ] && punlearn ${tool} && cp -Lvf ${pfile} ${PFILES_TMPDIR}/
+done
+
+# Modify environment variable 'PFILES' to use local pfiles first
+export PFILES="${PFILES_TMPDIR}:${PFILES}"
+## pfiles }}}
while ! [ -z $1 ]; do
f=$1
@@ -41,3 +64,6 @@ while ! [ -z $1 ]; do
echo " ${PB_FLUX}"
done
+# clean pfiles
+rm -rf ${PFILES_TMPDIR}
+
diff --git a/scripts/chandra_update_xcentroid.sh b/scripts/chandra_update_xcentroid.sh
index 3f2a907..958942d 100755
--- a/scripts/chandra_update_xcentroid.sh
+++ b/scripts/chandra_update_xcentroid.sh
@@ -2,25 +2,24 @@
#
unalias -a
export LC_COLLATE=C
-###########################################################
-## based on `ciao_expcorr_sbp.sh' ##
-## get `xcentroid' from region `sbprofile.reg' ##
-## convert from physical coords to WCS corrds ##
-## add/update xcentroid WCS to info.json ##
-## ##
-## LIweitiaNux <liweitianux@gmail.com> ##
-## 2013/05/29 ##
-###########################################################
-
-###########################################################
+##
+## based on `ciao_expcorr_sbp.sh'
+## get `xcentroid' from region `sbprofile.reg'
+## convert from physical coords to WCS coords
+## add/update xcentroid WCS to info.json
+##
+## Weitian LI <liweitianux@gmail.com>
+## 2013/05/29
+##
+VERSION="v2.0"
+UPDATED="2015/06/03"
+##
## ChangeLogs:
-## v1.0, 2013/05/29, LIweitiaNux
-###########################################################
-
-## about, used in `usage' {{{
-VERSION="v1.0"
-UPDATE="2013-05-29"
-## about }}}
+## v2.0, 2015/06/03, Aaron LI
+## * Copy needed pfiles to current working directory, and
+## set environment variable $PFILES to use these first.
+## * Replace 'grep' with '\grep', 'ls' with '\ls'
+##
## error code {{{
ERR_USG=1
@@ -46,7 +45,7 @@ case "$1" in
printf "usage:\n"
printf " `basename $0` evt=<evt_file> reg=<sbp_reg> basedir=<base_dir> info=<INFO.json> update=<yes|no>\n"
printf "\nversion:\n"
- printf "${VERSION}, ${UPDATE}\n"
+ printf "${VERSION}, ${UPDATED}\n"
exit ${ERR_USG}
;;
esac
@@ -55,7 +54,7 @@ esac
## default parameters {{{
# default `event file' which used to match `blanksky' files
#DFT_EVT="_NOT_EXIST_"
-DFT_EVT="`ls evt2*_clean.fits 2> /dev/null`"
+DFT_EVT="`\ls evt2*_clean.fits 2> /dev/null`"
# default dir which contains `asols, asol.lis, ...' files
# DFT_BASEDIR="_NOT_EXIST_"
DFT_BASEDIR=".."
@@ -146,8 +145,8 @@ printf "## use basedir: \`${BASEDIR}'\n" | ${TOLOG}
# check INFO.json file
if [ ! -z "${info}" ] && [ -r "${BASEDIR}/${info}" ]; then
INFO_JSON="${info}"
-elif [ "`ls ${BASEDIR}/${DFT_INFO_PAT} | wc -l`" -eq 1 ]; then
- INFO_JSON=`( cd ${BASEDIR} && ls ${DFT_INFO_PAT} )`
+elif [ "`\ls ${BASEDIR}/${DFT_INFO_PAT} | wc -l`" -eq 1 ]; then
+ INFO_JSON=`( cd ${BASEDIR} && \ls ${DFT_INFO_PAT} )`
else
read -p "> info json file: " INFO_JSON
if ! [ -r "${BASEDIR}/${INFO_JSON}" ]; then
@@ -172,14 +171,27 @@ else
fi
## parameters }}}
+## prepare parameter files (pfiles) {{{
+CIAO_TOOLS="dmcoords"
+
+# Copy necessary pfiles for localized usage
+for tool in ${CIAO_TOOLS}; do
+ pfile=`paccess ${tool}`
+ [ -n "${pfile}" ] && punlearn ${tool} && cp -Lvf ${pfile} .
+done
+
+# Modify environment variable 'PFILES' to use local pfiles first
+export PFILES="./:${PFILES}"
+## pfiles }}}
+
## main process {{{
# asolis
-ASOLIS=`( cd ${BASEDIR} && ls ${DFT_ASOLIS_PAT} 2> /dev/null )`
+ASOLIS=`( cd ${BASEDIR} && \ls ${DFT_ASOLIS_PAT} 2> /dev/null )`
# get (x,y) from sbp region
printf "get (x,y) from ${SBP_REG}\n"
-X=`grep -iE '(pie|annulus)' ${SBP_REG} | head -n 1 | awk -F',' '{ print $1 }' | tr -d 'a-zA-Z() '`
-Y=`grep -iE '(pie|annulus)' ${SBP_REG} | head -n 1 | awk -F',' '{ print $2 }' | tr -d 'a-zA-Z() '`
+X=`\grep -iE '(pie|annulus)' ${SBP_REG} | head -n 1 | awk -F',' '{ print $1 }' | tr -d 'a-zA-Z() '`
+Y=`\grep -iE '(pie|annulus)' ${SBP_REG} | head -n 1 | awk -F',' '{ print $2 }' | tr -d 'a-zA-Z() '`
# dmcoords to convert (x,y) to (ra,dec)
printf "\`dmcoords' to convert (x,y) to (ra,dec) ...\n"
@@ -194,7 +206,7 @@ printf "## (ra,dec): ($RA,$DEC)\n"
if [ "${F_UPDATE}" = "YES" ]; then
cp -f ${INFO_JSON} ${INFO_JSON}_bak
printf "update xcentroid for info.json ...\n"
- if grep -qE 'XCNTRD_(RA|DEC)' ${INFO_JSON}; then
+ if \grep -qE 'XCNTRD_(RA|DEC)' ${INFO_JSON}; then
printf "update ...\n"
sed -i'' "s/XCNTRD_RA.*$/XCNTRD_RA\":\ \"${RA}\",/" ${INFO_JSON}
sed -i'' "s/XCNTRD_DEC.*$/XCNTRD_DEC\":\ \"${DEC}\",/" ${INFO_JSON}
diff --git a/scripts/chandra_xcentroid.sh b/scripts/chandra_xcentroid.sh
index 14be6a5..5a1e908 100755
--- a/scripts/chandra_xcentroid.sh
+++ b/scripts/chandra_xcentroid.sh
@@ -12,25 +12,27 @@
## if `DETNAM' has `0123', then `ACIS-I' ##
## if `DETNAM' has `7', then `ACIS-S' ##
## ##
-## LIweitiaNux <liweitianux@gmail.com> ##
-## November 8, 2012 ##
-###########################################################
-
+## Weitian LI <liweitianux@gmail.com> ##
+## 2012/11/08 ##
###########################################################
+##
+VERSION="v3.0"
+UPDATED="2015/06/03"
+##
## ChangeLogs:
-## v2.1, 2013/10/12, LIweitiaNux
+## v3.0, 2015/06/03, Aaron LI
+## * Copy needed pfiles to current working directory, and
+## set environment variable $PFILES to use these first.
+## * Replace 'grep' with '\grep', 'ls' with '\ls'
+## * Removed section of 'dmstat.par' deletion
+## v2.1, 2013/10/12, Weitian LI
## add support for extract center coordinates from 'point' and 'circle' regs
-## v2.0, 2013/01/22, LIweitiaNux
+## v2.0, 2013/01/22, Weitian LI
## aconvolve switch
-## add iterations for better confidence
-## v1.1, 2012/11/08, LIweitiaNux
+## add iterations for better accuracy
+## v1.1, 2012/11/08, Weitian LI
## get x-ray peak coord from given region file
-###########################################################
-
-## about, used in `usage' {{{
-VERSION="v2.1"
-UPDATE="2013-10-12"
-## about }}}
+##
## error code {{{
ERR_USG=1
@@ -53,7 +55,7 @@ case "$1" in
printf "usage:\n"
printf " `basename $0` evt=<evt_cl> reg=<reg> [ asol=<asol> chip=<chip> ] [ conv=yes|No ]\n"
printf "\nversion:\n"
- printf "${VERSION}, ${UPDATE}\n"
+ printf "${VERSION}, ${UPDATED}\n"
exit ${ERR_USG}
;;
esac
@@ -78,11 +80,11 @@ OFFSET_CRIC=10
# energy range: 700-2000 eV
E_RANGE="700:2000"
# default `evt clean file'
-DFT_EVT="`ls evt*clean.fits *clean*evt*.fits 2> /dev/null | head -n 1`"
+DFT_EVT="`\ls evt*clean.fits *clean*evt*.fits 2> /dev/null | head -n 1`"
# default `asol file'
-DFT_ASOL="`ls pcadf*_asol1.fits 2> /dev/null | head -n 1`"
+DFT_ASOL="`\ls pcadf*_asol1.fits 2> /dev/null | head -n 1`"
# default region file
-DFT_REG="`ls sbprofile.reg rspec.reg 2> /dev/null | head -n 1`"
+DFT_REG="`\ls sbprofile.reg rspec.reg 2> /dev/null | head -n 1`"
# iteration step, ~150 arcsec, ~50 arcsec
R_STP1=300
R_STP2=100
@@ -153,8 +155,8 @@ else
fi
printf "## use reg file: \`${REG}'\n"
# get centroid from the regionnn file
-CNTRD_X2=`grep -iE '(point|circle|pie|annulus)' ${REG} | head -n 1 | tr -d 'a-zA-Z()' | awk -F',' '{ print $1 }'`
-CNTRD_Y2=`grep -iE '(point|circle|pie|annulus)' ${REG} | head -n 1 | tr -d 'a-zA-Z()' | awk -F',' '{ print $2 }'`
+CNTRD_X2=`\grep -iE '(point|circle|pie|annulus)' ${REG} | head -n 1 | tr -d 'a-zA-Z()' | awk -F',' '{ print $1 }'`
+CNTRD_Y2=`\grep -iE '(point|circle|pie|annulus)' ${REG} | head -n 1 | tr -d 'a-zA-Z()' | awk -F',' '{ print $2 }'`
printf "## center from given regfile: (${CNTRD_X2},${CNTRD_Y2})\n"
@@ -181,12 +183,12 @@ else
# determine chip by ACIS type
punlearn dmkeypar
DETNAM=`dmkeypar ${EVT} DETNAM echo=yes`
- if echo ${DETNAM} | grep -q 'ACIS-0123'; then
+ if echo ${DETNAM} | \grep -q 'ACIS-0123'; then
printf "## \`DETNAM' (${DETNAM}) has chips 0123\n"
printf "## ACIS-I\n"
ACIS_TYPE="ACIS-I"
CHIP="0:3"
- elif echo ${DETNAM} | grep -q 'ACIS-[0-6]*7'; then
+ elif echo ${DETNAM} | \grep -q 'ACIS-[0-6]*7'; then
printf "## \`DETNAM' (${DETNAM}) has chip 7\n"
printf "## ACIS-S\n"
ACIS_TYPE="ACIS-S"
@@ -198,13 +200,18 @@ else
fi
## parameters }}}
-## dmstat.par {{{
-## 2013/02/07, LIweitiaNux
-## for some unknown reason, the wrong parameter file will cause dmstat failed to run
-printf "remove \`dmstat.par' ...\n"
-DMSTAT_PAR="$HOME/cxcds_param4/dmstat.par"
-[ -f "${DMSTAT_PAR}" ] && rm -fv ${DMSTAT_PAR}
-## dmstat.par }}}
+## prepare parameter files (pfiles) {{{
+CIAO_TOOLS="dmkeypar dmcopy dmstat dmcoords skyfov aconvolve"
+
+# Copy necessary pfiles for localized usage
+for tool in ${CIAO_TOOLS}; do
+ pfile=`paccess ${tool}`
+ [ -n "${pfile}" ] && punlearn ${tool} && cp -Lvf ${pfile} .
+done
+
+# Modify environment variable 'PFILES' to use local pfiles first
+export PFILES="./:${PFILES}"
+## pfiles }}}
## main part {{{
# generate `skyfov'
@@ -244,7 +251,6 @@ printf " region size ${R_STP1}pix: "
for i in `seq 1 5`; do
printf "#$i ... "
punlearn dmstat
- # dmstat infile="${IMG_ACONV}[sky=region(${TMP_REG})]" centroid=yes verbose=1
dmstat infile="${IMG_ACONV}[sky=region(${TMP_REG})]" centroid=yes verbose=0
CNTRD_X=`pget dmstat out_cntrd_phys | cut -d',' -f1`
CNTRD_Y=`pget dmstat out_cntrd_phys | cut -d',' -f2`
diff --git a/scripts/chandra_xpeak_coord.sh b/scripts/chandra_xpeak_coord.sh
index 8b0644a..beb2698 100755
--- a/scripts/chandra_xpeak_coord.sh
+++ b/scripts/chandra_xpeak_coord.sh
@@ -12,20 +12,21 @@
## if `DETNAM' has `0123', then `ACIS-I' ##
## if `DETNAM' has `7', then `ACIS-S' ##
## ##
-## LIweitiaNux <liweitianux@gmail.com> ##
-## November 8, 2012 ##
-###########################################################
-
+## Weitian LI <liweitianux@gmail.com> ##
+## 2012/11/08 ##
###########################################################
+##
+VERSION="v2.0"
+UPDATED="2015/06/03"
+##
## ChangeLogs:
-## v1.1, 2012/11/08, LIweitiaNux
+## v2.0, 2015/06/03, Aaron LI
+## * Copy needed pfiles to current working directory, and
+## set environment variable $PFILES to use these first.
+## * Replace 'grep' with '\grep', 'ls' with '\ls'
+## v1.1, 2012/11/08, Weitian LI
## get x-ray peak coord from given region file
-###########################################################
-
-## about, used in `usage' {{{
-VERSION="v1.1"
-UPDATE="2012-11-08"
-## about }}}
+##
## error code {{{
ERR_USG=1
@@ -48,7 +49,7 @@ case "$1" in
printf "usage:\n"
printf " `basename $0` evt=<evt_cl> asol=<asol> [ reg=<reg> chip=<chip> ]\n"
printf "\nversion:\n"
- printf "${VERSION}, ${UPDATE}\n"
+ printf "${VERSION}, ${UPDATED}\n"
exit ${ERR_USG}
;;
esac
@@ -56,11 +57,11 @@ esac
## default parameters {{{
# default `evt clean file'
-DFT_EVT="`ls evt*clean.fits *clean*evt*.fits 2> /dev/null | head -n 1`"
+DFT_EVT="`\ls evt*clean.fits *clean*evt*.fits 2> /dev/null | head -n 1`"
# default `asol file'
-DFT_ASOL="`ls ../pcadf*_asol1.fits pcadf*_asol1.fits 2> /dev/null | head -n 1`"
+DFT_ASOL="`\ls ../pcadf*_asol1.fits pcadf*_asol1.fits 2> /dev/null | head -n 1`"
# default region file
-DFT_REG="`ls sbprofile.reg rspec.reg 2> /dev/null | head -n 1`"
+DFT_REG="`\ls sbprofile.reg rspec.reg 2> /dev/null | head -n 1`"
## default parameters }}}
## functions {{{
@@ -128,12 +129,12 @@ else
# determine chip by ACIS type
punlearn dmkeypar
DETNAM=`dmkeypar ${EVT} DETNAM echo=yes`
- if echo ${DETNAM} | grep -q 'ACIS-0123'; then
+ if echo ${DETNAM} | \grep -q 'ACIS-0123'; then
printf "## \`DETNAM' (${DETNAM}) has chips 0123\n"
printf "## ACIS-I\n"
ACIS_TYPE="ACIS-I"
CHIP="0:3"
- elif echo ${DETNAM} | grep -q 'ACIS-[0-6]*7'; then
+ elif echo ${DETNAM} | \grep -q 'ACIS-[0-6]*7'; then
printf "## \`DETNAM' (${DETNAM}) has chip 7\n"
printf "## ACIS-S\n"
ACIS_TYPE="ACIS-S"
@@ -145,6 +146,19 @@ else
fi
## parameters }}}
+## prepare parameter files (pfiles) {{{
+CIAO_TOOLS="dmkeypar dmcopy dmstat dmcoords skyfov aconvolve"
+
+# Copy necessary pfiles for localized usage
+for tool in ${CIAO_TOOLS}; do
+ pfile=`paccess ${tool}`
+ [ -n "${pfile}" ] && punlearn ${tool} && cp -Lvf ${pfile} .
+done
+
+# Modify environment variable 'PFILES' to use local pfiles first
+export PFILES="./:${PFILES}"
+## pfiles }}}
+
## main part {{{
# generate `skyfov'
SKYFOV="_skyfov.fits"
@@ -199,8 +213,8 @@ printf " (RA,DEC): (${MAX_RA},${MAX_DEC})\n"
## region file based {{{
if [ -r "${REG}" ]; then
- MAX_X2=`grep -iE '(pie|annulus)' ${REG} | head -n 1 | tr -d 'a-zA-Z()' | awk -F',' '{ print $1 }'`
- MAX_Y2=`grep -iE '(pie|annulus)' ${REG} | head -n 1 | tr -d 'a-zA-Z()' | awk -F',' '{ print $2 }'`
+ MAX_X2=`\grep -iE '(pie|annulus)' ${REG} | head -n 1 | tr -d 'a-zA-Z()' | awk -F',' '{ print $1 }'`
+ MAX_Y2=`\grep -iE '(pie|annulus)' ${REG} | head -n 1 | tr -d 'a-zA-Z()' | awk -F',' '{ print $2 }'`
punlearn dmcoords
dmcoords infile="${EVT}" asolfile="${ASOL}" option=sky x=${MAX_X2} y=${MAX_Y2}
MAX_RA2=`pget dmcoords ra`
diff --git a/scripts/ciao_bkg_spectra.sh b/scripts/ciao_bkg_spectra.sh
index 4cb8475..96560e6 100755
--- a/scripts/ciao_bkg_spectra.sh
+++ b/scripts/ciao_bkg_spectra.sh
@@ -1,8 +1,6 @@
-#!/bin/sh -
+#!/bin/sh
#
-trap date INT
unalias -a
-export GREP_OPTIONS=""
export LC_COLLATE=C
###########################################################
## extract background spectra from src and blanksky ##
@@ -15,14 +13,22 @@ export LC_COLLATE=C
## Ref: CIAO v4.4 region bugs ##
## http://cxc.harvard.edu/ciao/bugs/regions.html#bug-12187
## ##
-## LIweitiaNux, July 24, 2012 ##
+## Weitian LI ##
+## 2012/07/24 ##
###########################################################
-###########################################################
+VERSION="v5.0"
+UPDATED="2015/06/02"
# ChangeLogs:
-# v3, 2012/08/09
-# fix `scientific notation' for `bc'
-# change `spec group' method to `min 15'
+# v5.0, 2015/06/02, Aaron LI
+# * Removed 'GREP_OPTIONS' and replace 'grep' with '\grep'
+# * Removed 'trap INT date'
+# * Copy needed pfiles to current working directory, and
+# set environment variable $PFILES to use these first.
+# * replaced 'grppha' with 'dmgroup' to group spectra
+# (dmgroup will add history to fits file, while grppha NOT)
+# v4.1, 2014/07/29, Weitian LI
+# fix 'pbkfile' parameters for CIAO-4.6
# v4, 2012/08/13
# add `clobber=yes'
# improve error code
@@ -31,22 +37,22 @@ export LC_COLLATE=C
# (through cmdline which similar to CIAO,
# and default filename match patterns)
# add simple `logging' function
-# v4.1, 2014/07/29, Weitian LI
-# fix 'pbkfile' parameters for CIAO-4.6
-###########################################################
+# v3, 2012/08/09
+# fix `scientific notation' for `bc'
+# change `spec group' method to `min 15'
+#
-## about, used in `usage' {{{
-VERSION="v4"
-UPDATE="2012-08-14"
-## about }}}
## usage, help {{{
case "$1" in
-[hH]*|--[hH]*)
printf "usage:\n"
- printf " `basename $0` evt=<evt2_clean> reg=<reglist> blank=<blanksky_evt> basedir=<base_dir> nh=<nH> z=<redshift> [ grpcmd=<grppha_cmd> log=<log_file> ]\n"
+ printf " `basename $0` evt=<evt2_clean> reg=<reglist> blank=<blanksky_evt> basedir=<base_dir> nh=<nH> z=<redshift> [ grouptype=<NUM_CTS|BIN> grouptypeval=<number> binspec=<binspec> log=<log_file> ]\n"
+ printf "\nNotes:\n"
+ printf " If grouptype=NUM_CTS, then grouptypeval required.\n"
+ printf " If grouptype=BIN, then binspec required.\n"
printf "\nversion:\n"
- printf "${VERSION}, ${UPDATE}\n"
+ printf " ${VERSION}, ${UPDATED}\n"
exit ${ERR_USG}
;;
esac
@@ -55,16 +61,18 @@ esac
## default parameters {{{
# default `event file' which used to match `blanksky' files
#DFT_EVT="_NOT_EXIST_"
-DFT_EVT="`ls evt2*_clean.fits`"
+DFT_EVT="`\ls evt2*_clean.fits`"
# default `blanksky file'
#DFT_BLANK="_NOT_EXIST_"
-DFT_BLANK="`ls blanksky*.fits`"
+DFT_BLANK="`\ls blanksky*.fits`"
# default dir which contains `asols, asol.lis, ...' files
#DFT_BASEDIR="_NOT_EXIST_"
DFT_BASEDIR=".."
-# default `group command' for `grppha'
-#DFT_GRP_CMD="group 1 128 2 129 256 4 257 512 8 513 1024 16"
-DFT_GRP_CMD="group min 20"
+# default parameters for 'dmgroup'
+DFT_GROUPTYPE="NUM_CTS"
+DFT_GROUPTYPEVAL="20"
+#DFT_GROUPTYPE="BIN"
+DFT_BINSPEC="1:128:2,129:256:4,257:512:8,513:1024:16"
# default `log file'
DFT_LOGFILE="bkg_spectra_`date '+%Y%m%d'`.log"
@@ -115,7 +123,7 @@ CH_LOW=651
CH_HI=822
pb_flux() {
punlearn dmstat
- COUNTS=`dmstat "$1[channel=${CH_LOW}:${CH_HI}][cols COUNTS]" | grep -i 'sum:' | awk '{ print $2 }'`
+ COUNTS=`dmstat "$1[channel=${CH_LOW}:${CH_HI}][cols COUNTS]" | \grep -i 'sum:' | awk '{ print $2 }'`
punlearn dmkeypar
EXPTIME=`dmkeypar $1 EXPOSURE echo=yes`
BACK=`dmkeypar $1 BACKSCAL echo=yes`
@@ -222,13 +230,28 @@ fi
# remove the trailing '/'
BASEDIR=`echo ${BASEDIR} | sed 's/\/*$//'`
printf "## use basedir: \`${BASEDIR}'\n" | ${TOLOG}
-# check given `grpcmd'
-if [ ! -z "${grpcmd}" ]; then
- GRP_CMD="${grpcmd}"
+# check given dmgroup parameters: grouptype, grouptypeval, binspec
+if [ -z "${grouptype}" ]; then
+ GROUPTYPE="${DFT_GROUPTYPE}"
+elif [ "x${grouptype}" = "xNUM_CTS" ] || [ "x${grouptype}" = "xBIN" ]; then
+ GROUPTYPE="${grouptype}"
+else
+ printf "ERROR: given grouptype \`${grouptype}' invalid.\n"
+ exit ${ERR_GRPTYPE}
+fi
+printf "## use grouptype: \`${GROUPTYPE}'\n" | ${TOLOG}
+if [ ! -z "${grouptypeval}" ]; then
+ GROUPTYPEVAL="${grouptypeval}"
else
- GRP_CMD="${DFT_GRP_CMD}"
+ GROUPTYPEVAL="${DFT_GROUPTYPEVAL}"
fi
-printf "## use grppha cmd: \`${GRP_CMD}'\n" | ${TOLOG}
+printf "## use grouptypeval: \`${GROUPTYPEVAL}'\n" | ${TOLOG}
+if [ ! -z "${binspec}" ]; then
+ BINSPEC="${binspec}"
+else
+ BINSPEC="${DFT_BINSPEC}"
+fi
+printf "## use binspec: \`${BINSPEC}'\n" | ${TOLOG}
## parameters }}}
## check needed files {{{
@@ -242,7 +265,7 @@ for reg_f in ${REGLIST}; do
done
# check the validity of *pie* regions
printf "check pie reg validity ...\n"
-INVALID=`cat ${REGLIST} | grep -i 'pie' | awk -F, '{ print $6 }' | tr -d ')' | awk '$1 > 360'`
+INVALID=`cat ${REGLIST} | \grep -i 'pie' | awk -F, '{ print $6 }' | tr -d ')' | awk '$1 > 360'`
if [ "x${INVALID}" != "x" ]; then
printf "WARNING: some pie region's END_ANGLE > 360\n" | ${TOLOG}
printf " CIAO v4.4 tools may run into trouble\n"
@@ -251,28 +274,28 @@ fi
# check files in `basedir'
printf "check needed files in basedir \`${BASEDIR}' ...\n"
# check asolis files
-ASOLIS=`ls -1 ${BASEDIR}/${DFT_ASOLIS_PAT} | head -n 1`
+ASOLIS=`\ls -1 ${BASEDIR}/${DFT_ASOLIS_PAT} | head -n 1`
if [ -z "${ASOLIS}" ]; then
printf "ERROR: cannot find \"${DFT_ASOLIS_PAT}\" in dir \`${BASEDIR}'\n"
exit ${ERR_ASOL}
fi
printf "## use asolis: \`${ASOLIS}'\n" | ${TOLOG}
# check badpixel file
-BPIX=`ls -1 ${BASEDIR}/${DFT_BPIX_PAT} | head -n 1`
+BPIX=`\ls -1 ${BASEDIR}/${DFT_BPIX_PAT} | head -n 1`
if [ -z "${BPIX}" ]; then
printf "ERROR: cannot find \"${DFT_BPIX_PAT}\" in dir \`${BASEDIR}'\n"
exit ${ERR_BPIX}
fi
printf "## use badpixel: \`${BPIX}'\n" | ${TOLOG}
# check pbk file
-PBK=`ls -1 ${BASEDIR}/${DFT_PBK_PAT} | head -n 1`
+PBK=`\ls -1 ${BASEDIR}/${DFT_PBK_PAT} | head -n 1`
if [ -z "${PBK}" ]; then
printf "ERROR: cannot find \"${DFT_PBK_PAT}\" in dir \`${BASEDIR}'\n"
exit ${ERR_PBK}
fi
printf "## use pbk: \`${PBK}'\n" | ${TOLOG}
# check msk file
-MSK=`ls -1 ${BASEDIR}/${DFT_MSK_PAT} | head -n 1`
+MSK=`\ls -1 ${BASEDIR}/${DFT_MSK_PAT} | head -n 1`
if [ -z "${MSK}" ]; then
printf "ERROR: cannot find \"${DFT_MSK_PAT}\" in dir \`${BASEDIR}'\n"
exit ${ERR_MSK}
@@ -280,6 +303,19 @@ fi
printf "## use msk: \`${MSK}'\n" | ${TOLOG}
## check files }}}
+## prepare parameter files (pfiles) {{{
+CIAO_TOOLS="dmstat dmkeypar dmhedit specextract dmextract dmgroup"
+
+# Copy necessary pfiles for localized usage
+for tool in ${CIAO_TOOLS}; do
+ pfile=`paccess ${tool}`
+ [ -n "${pfile}" ] && punlearn ${tool} && cp -Lvf ${pfile} .
+done
+
+# Modify environment variable 'PFILES' to use local pfiles first
+export PFILES="./:${PFILES}"
+## pfiles }}}
+
## main part {{{
## use 'for' loop to process every region file
for reg_i in ${REGLIST}; do
@@ -290,7 +326,7 @@ for reg_i in ${REGLIST}; do
[ -f "${REG_TMP}" ] && rm -fv ${REG_TMP} # remove tmp files
cp -fv ${reg_i} ${REG_TMP}
# check the validity of *pie* regions {{{
- INVALID=`grep -i 'pie' ${REG_TMP} | awk -F, '{ print $6 }' | tr -d ')' | awk '$1 > 360'`
+ INVALID=`\grep -i 'pie' ${REG_TMP} | awk -F, '{ print $6 }' | tr -d ')' | awk '$1 > 360'`
if [ "x${INVALID}" != "x" ]; then
printf "WARNING: fix for *pie* region in file \`${reg_i}'\n"
cat ${REG_TMP}
@@ -303,30 +339,52 @@ for reg_i in ${REGLIST}; do
## check pie region }}}
LBKG_PI="${reg_i%.reg}.pi"
+
+ printf "use \`specextract' to generate spectra and response ...\n"
## use `specextract' to extract local bkg spectrum {{{
# NOTE: set `binarfwmap=2' to save the time for generating `ARF'
# I have tested that this bin factor has little impact on the results.
# NO background response files
# NO background spectrum (generate by self)
- # NO spectrum grouping (group by self using `grppha')
+ # NO spectrum grouping (group by self using `dmgroup')
+ # Determine parameters for different versions of specextract {{{
# 'pbkfile' parameter deprecated in CIAO-4.6
if `pget specextract pbkfile >/dev/null 2>&1`; then
P_PBKFILE="pbkfile=${PBK}"
else
P_PBKFILE=""
fi
- #
- printf "use \`specextract' to generate spectra and response ...\n"
+ # specextract: revision 2013-06:
+ # 'correct' parameter renamed to 'correctpsf'
+ if `pget specextract correct >/dev/null 2>&1`; then
+ P_CORRECT="correct=no"
+ else
+ P_CORRECT="correctpsf=no"
+ fi
+ # specextract: revision 2013-12:
+ # 'weight' parameter controls whether ONLY ARFs are weighted.
+ # 'weight_rmf' added to control whether RMFs are weighted.
+ # NOTE:
+ # (1) only when 'weight=yes' will the 'weight_rmf' parameter be used.
+ # (2) no longer distingush between unweighted and weighted reponses
+ # in file extension; only .arf & .rmf are now used.
+ if `pget specextract weight_rmf >/dev/null 2>&1`; then
+ P_WEIGHTRMF="weight_rmf=yes"
+ else
+ P_WEIGHTRMF=""
+ fi
+ # }}}
punlearn specextract
specextract infile="${EVT}[sky=region(${REG_TMP})]" \
outroot=${LBKG_PI%.pi} bkgfile="" asp="@${ASOLIS}" \
- ${P_PBKFILE} mskfile="${MSK}" badpixfile="${BPIX}" \
- weight=yes correct=no bkgresp=no \
+ mskfile="${MSK}" badpixfile="${BPIX}" \
+ ${P_PBKFILE} ${P_CORRECT} \
+ weight=yes ${P_WEIGHTRMF} \
energy="0.3:11.0:0.01" channel="1:1024:1" \
- combine=no binarfwmap=2 \
+ bkgresp=no combine=no binarfwmap=2 \
grouptype=NONE binspec=NONE \
- verbose=2 clobber=yes
- # `specextract' }}}
+ clobber=yes verbose=2
+ # specextract }}}
## generate the blanksky bkg spectrum {{{
printf "generate the blanksky bkg spectrum ...\n"
@@ -342,11 +400,16 @@ for reg_i in ${REGLIST}; do
## bkg renorm }}}
## group spectrum {{{
- printf "group spectrum using \`grppha'\n"
+ # use 'dmgroup' instead of 'grppha', because 'dmgroup' will add
+ # command history to FITS header (maybe useful for later reference).
+ printf "group spectrum using \`dmgroup'\n"
LBKG_GRP_PI="${LBKG_PI%.pi}_grp.pi"
- grppha infile="${LBKG_PI}" outfile="${LBKG_GRP_PI}" \
- comm="${GRP_CMD} & exit" clobber=yes > /dev/null
- ## group spectra }}}
+ punlearn dmgroup
+ dmgroup infile="${LBKG_PI}" outfile="${LBKG_GRP_PI}" \
+ grouptype="${GROUPTYPE}" grouptypeval=${GROUPTYPEVAL} \
+ binspec="${BINSPEC}" xcolumn="CHANNEL" ycolumn="COUNTS" \
+ clobber=yes
+ ## group }}}
## generate a script for XSPEC {{{
XSPEC_XCM="xspec_${LBKG_PI%.pi}_model.xcm"
@@ -420,6 +483,5 @@ printf "clean ...\n"
rm -f ${REG_TMP}
printf "DONE\n"
-###########################################################
# vim: set ts=8 sw=4 tw=0 fenc=utf-8 ft=sh: #
diff --git a/scripts/ciao_blanksky.sh b/scripts/ciao_blanksky.sh
index da1d6f6..acd821e 100755
--- a/scripts/ciao_blanksky.sh
+++ b/scripts/ciao_blanksky.sh
@@ -13,7 +13,8 @@ export LC_COLLATE=C
## output: `blanksky_c7.fits' for ACIS-S, ##
## `blanksky-c0-3.fits' for ACIS-I ##
## ##
-## LIweitiaNux, January 11, 2011 ##
+## Weitian LI ##
+## 2011/01/11 ##
###########################################################
###########################################################
@@ -22,10 +23,17 @@ export LC_COLLATE=C
## add ACIS-I support (chips 0-3)
## v3: 2012/08/06, Junhua Gu
## pass parameters by cmd line param
-## v4: 2012/08/13, LIweitiaNux
+## v4: 2012/08/13, Weitian LI
## add `clobber=yes' parameters to CIAO tools
## improve `commandline arguements'
## add `default parameters'
+## v5.0, 2015/06/02, Aaron LI
+## * Replaced 'ls' with '\ls'
+## * Copy needed pfiles to current working directory, and
+## set environment variable $PFILES to use these first.
+## * Strip the directory of asol files in the 'asol*.lis' file.
+## CIAO-4.6 save the *absolute path* of asol files in 'asol*.lis',
+## which may cause problems if the data directory was moved later.
###########################################################
## about, used in `usage' {{{
@@ -48,7 +56,7 @@ esac
## default parameters {{{
# default `event file' which used to match `blanksky' files
#DFT_EVT="_NOT_EXIST_"
-DFT_EVT="`ls evt2*_clean.fits`"
+DFT_EVT="`\ls evt2*_clean.fits`"
# default dir which contains `asols, asol.lis, ...' files
# DFT_BASEDIR="_NOT_EXIST_"
DFT_BASEDIR=".."
@@ -140,17 +148,36 @@ printf "## use basedir: \`${BASEDIR}'\n"
## check files in `basedir' {{{
# check asol files
-ASOLIS=`ls -1 ${BASEDIR}/${DFT_ASOLIS_PAT} | head -n 1`
+ASOLIS=`\ls -1 ${BASEDIR}/${DFT_ASOLIS_PAT} | head -n 1`
if [ -z ${ASOLIS} ]; then
printf "ERROR: cannot find \"${DFT_ASOLIS_PAT}\" in dir \`${BASEDIR}'\n"
exit ${ERR_ASOL}
fi
printf "## use asolis: \`${ASOLIS}'\n"
## check files }}}
-#exit 0 # test script
+
+## prepare parameter files (pfiles) {{{
+CIAO_TOOLS="acis_bkgrnd_lookup dmmerge dmcopy dmmakepar dmreadpar reproject_events acis_process_events"
+
+# Copy necessary pfiles for localized usage
+for tool in ${CIAO_TOOLS}; do
+ pfile=`paccess ${tool}`
+ [ -n "${pfile}" ] && punlearn ${tool} && cp -Lvf ${pfile} .
+done
+
+# Modify environment variable 'PFILES' to use local pfiles first
+export PFILES="./:${PFILES}"
+## pfiles }}}
#### main start {{{
+# Strip the directory of asol files in the 'asol*.lis' file
+# CIAO-4.6 save the absolute path of asol files in 'asol*.lis' file,
+# which may cause problems if the data directory was moved later.
+mv -fv ${ASOLIS} ${ASOLIS}_bak
+awk -F'/' '{ print $NF }' ${ASOLIS}_bak > ${ASOLIS}
+
printf "look up coresponding background file ...\n"
+punlearn acis_bkgrnd_lookup
BKG_LKP="`acis_bkgrnd_lookup ${EVT}`"
AS_NUM=`echo ${BKG_LKP} | tr ' ' '\n' | \grep 'acis7sD' | wc -l`
AI_NUM=`echo ${BKG_LKP} | tr ' ' '\n' | \grep 'acis[0123]iD' | wc -l`
@@ -216,7 +243,7 @@ EVT_HEADER="_evt_header.par"
EVT_PNT="_evt_pnt.par"
punlearn dmmakepar
dmmakepar ${EVT} ${EVT_HEADER} clobber=yes
-grep -i '_pnt' ${EVT_HEADER} > ${EVT_PNT}
+\grep -i '_pnt' ${EVT_HEADER} > ${EVT_PNT}
punlearn dmreadpar
dmreadpar ${EVT_PNT} "${BKG_ROOT}_orig.fits[EVENTS]" clobber=yes
printf "DONE\n"
diff --git a/scripts/ciao_calc_csb.sh b/scripts/ciao_calc_csb.sh
index ff922ee..dd44dad 100755
--- a/scripts/ciao_calc_csb.sh
+++ b/scripts/ciao_calc_csb.sh
@@ -1,15 +1,29 @@
#!/bin/sh
#
+# Calculate the Csb values.
# for 'z>0.3' or 'counts_in_0.048R500<500'
-# execute this script in dir 'spc/profile'
#
-# original filename: 'proxy_calc.sh', by Zhu Zhenhao
+# Reference: ??? (TODO)
+#
+# Note: execute this script in dir 'spc/profile'
+#
+# origin filename: 'proxy_calc.sh', by Zhenhao ZHU
# modified by: Weitian LI
#
-# ChangeLog:
-# 2014/12/15: (1) prompt and record the modification to csb region;
-# (2) added 'm' answer for WR: modified.
-# 2014/06/18: added answer for WR (warn region)
+#
+UPDATED="2015/06/03"
+#
+# Changelogs:
+# 2015/06/03, Aaron LI
+# * Copy needed pfiles to current working directory, and
+# set environment variable $PFILES to use these first.
+# * Replaced 'grep' with '\grep', 'ls' with '\ls'
+# * ds9 colormap changed from 'sls' to 'he'
+# 2014/12/15, Weitian LI
+# * prompt and record the modification to csb region;
+# * added 'm' answer for WR: modified.
+# 2014/06/18, Weitian LI
+# * added answer for WR (warn region)
#
## error code {{{
@@ -28,7 +42,9 @@ case "$1" in
-[hH]*|--[hH]*)
printf "usage:\n"
printf " `basename $0` evt_e=<evt_e_name> expmap=<expmap_name> basedir=<base_dir> imgdir=<img_dir> json=<json_name>\n"
- printf "NOTE: exec this script in dir 'spc/profile'\n"
+ printf "\nNOTE: exec this script in dir 'spc/profile'\n"
+ printf "\nupdated:\n"
+ printf " ${UPDATED}\n"
exit ${ERR_USG}
;;
esac
@@ -114,8 +130,8 @@ printf "## use imgdir: \`${IMG_DIR}'\n"
# info json
if [ ! -z "${json}" ] && [ -r "${BASEDIR}/${json}" ]; then
JSON_FILE="${BASEDIR}/${json}"
-elif [ `ls -1 ${BASEDIR}/${DFT_JSON_PAT} 2>/dev/null | wc -l` -eq 1 ]; then
- JSON_FILE="`ls ${BASEDIR}/${DFT_JSON_PAT} 2>/dev/null`"
+elif [ `\ls -1 ${BASEDIR}/${DFT_JSON_PAT} 2>/dev/null | wc -l` -eq 1 ]; then
+ JSON_FILE="`\ls ${BASEDIR}/${DFT_JSON_PAT} 2>/dev/null`"
else
read -p "> info json: " JSON_FILE
if [ ! -r "${BASEDIR}/${JSON_FILE}" ]; then
@@ -127,8 +143,8 @@ printf "## use json_file: \`${JSON_FILE}'\n"
# expmap
if [ ! -z "${expmap}" ] && [ -r "${IMG_DIR}/${expmap}" ]; then
EXPMAP="${expmap}"
-elif [ `ls -1 ${IMG_DIR}/${DFT_EXPMAP_PAT} 2>/dev/null | wc -l` -eq 1 ]; then
- EXPMAP="`( cd ${IMG_DIR} && ls ${DFT_EXPMAP_PAT} 2>/dev/null )`"
+elif [ `\ls -1 ${IMG_DIR}/${DFT_EXPMAP_PAT} 2>/dev/null | wc -l` -eq 1 ]; then
+ EXPMAP="`( cd ${IMG_DIR} && \ls ${DFT_EXPMAP_PAT} 2>/dev/null )`"
else
read -p "> expmap filename: " EXPMAP
if [ ! -r "${IMG_DIR}/${EXPMAP}" ]; then
@@ -140,8 +156,8 @@ printf "## use expmap: \`${EXPMAP}'\n"
# evt_e
if [ ! -z "${evt_e}" ] && [ -r "${IMG_DIR}/${evt_e}" ]; then
EVT_E="${evt_e}"
-elif [ `ls -1 ${IMG_DIR}/${DFT_EVTE_PAT} 2>/dev/null | wc -l` -eq 1 ]; then
- EVT_E="`( cd ${IMG_DIR} && ls ${DFT_EVTE_PAT} 2>/dev/null )`"
+elif [ `\ls -1 ${IMG_DIR}/${DFT_EVTE_PAT} 2>/dev/null | wc -l` -eq 1 ]; then
+ EVT_E="`( cd ${IMG_DIR} && \ls ${DFT_EVTE_PAT} 2>/dev/null )`"
else
read -p "> evt_e filename: " EVT_E
if [ ! -r "${IMG_DIR}/${EVT_E}" ]; then
@@ -152,16 +168,29 @@ fi
printf "## use evt_e: \`${EVT_E}'\n"
## }}}
+## prepare parameter files (pfiles) {{{
+CIAO_TOOLS="dmlist dmextract"
+
+# Copy necessary pfiles for localized usage
+for tool in ${CIAO_TOOLS}; do
+ pfile=`paccess ${tool}`
+ [ -n "${pfile}" ] && punlearn ${tool} && cp -Lvf ${pfile} .
+done
+
+# Modify environment variable 'PFILES' to use local pfiles first
+export PFILES="./:${PFILES}"
+## pfiles }}}
+
## main {{{
# in 'spc/profile'
-X=`grep -iE '(pie|annulus)' ${RSPEC_REG} | head -n 1 | awk -F'(' '{ print $2 }' | awk -F',' '{ print $1 }'`
-Y=`grep -iE '(pie|annulus)' ${RSPEC_REG} | head -n 1 | awk -F'(' '{ print $2 }' | awk -F',' '{ print $2 }'`
+X=`\grep -iE '(pie|annulus)' ${RSPEC_REG} | head -n 1 | awk -F'(' '{ print $2 }' | awk -F',' '{ print $1 }'`
+Y=`\grep -iE '(pie|annulus)' ${RSPEC_REG} | head -n 1 | awk -F'(' '{ print $2 }' | awk -F',' '{ print $2 }'`
# json file
-Z=`grep -i '"redshift"' ${JSON_FILE} | awk -F':' '{ print $2 }' | tr -d ' ,'`
-R500=`grep '"R500.*kpc' ${JSON_FILE} | awk -F':' '{ print $2 }' | tr -d ' ,'`
-OBS_ID=`grep '"Obs.*ID' ${JSON_FILE} | awk -F':' '{ print $2 }' | tr -d ' ,'`
-OBJ_NAME=`grep '"Source\ Name' ${JSON_FILE} | awk -F':' '{ print $2 }' | sed -e 's/\ *"//' -e 's/"\ *,$//'`
-#CT=`grep '"Cooling_time' ${JSON_FILE} | awk -F':' '{ print $2 }' | tr -d ' ,'`
+Z=`\grep -i '"redshift"' ${JSON_FILE} | awk -F':' '{ print $2 }' | tr -d ' ,'`
+R500=`\grep '"R500.*kpc' ${JSON_FILE} | awk -F':' '{ print $2 }' | tr -d ' ,'`
+OBS_ID=`\grep '"Obs.*ID' ${JSON_FILE} | awk -F':' '{ print $2 }' | tr -d ' ,'`
+OBJ_NAME=`\grep '"Source\ Name' ${JSON_FILE} | awk -F':' '{ print $2 }' | sed -e 's/\ *"//' -e 's/"\ *,$//'`
+#CT=`\grep '"Cooling_time' ${JSON_FILE} | awk -F':' '{ print $2 }' | tr -d ' ,'`
cd ${IMG_DIR}
printf "entered img directory\n"
@@ -174,13 +203,13 @@ if [ `echo "${Z} < 0.3" | bc -l` -eq 1 ]; then
# exit ${ERR_Z}
fi
-#KPC_PER_PIX=`${COSMO_CALC} ${Z} | grep 'kpc/pixel' | awk '{ print $3 }'`
-KPC_PER_PIX=`${COSMO_CALC} ${Z} | grep 'kpc.*pix' | tr -d 'a-zA-Z_#=(),:/ '`
+#KPC_PER_PIX=`${COSMO_CALC} ${Z} | \grep 'kpc/pixel' | awk '{ print $3 }'`
+KPC_PER_PIX=`${COSMO_CALC} ${Z} | \grep 'kpc.*pix' | tr -d 'a-zA-Z_#=(),:/ '`
RC_PIX=`echo "scale=2; 0.048 * ${R500} / ${KPC_PER_PIX}" | bc -l`
# test counts_in_0.048R500<500?
RC_REG="pie(${X},${Y},0,${RC_PIX},0,360)"
punlearn dmlist
-CNT_RC=`dmlist infile="${EVT_E}[sky=${RC_REG}]" opt=block | grep 'EVENTS' | awk '{ print $8 }'`
+CNT_RC=`dmlist infile="${EVT_E}[sky=${RC_REG}]" opt=block | \grep 'EVENTS' | awk '{ print $8 }'`
printf "R500=${R500}, 0.048R500_pix=${RC_PIX}, counts_in_0.048R500=${CNT_RC}\n"
if [ ${CNT_RC} -gt 500 ]; then
F_WC=true
@@ -202,7 +231,7 @@ _EOF_
printf "CHECK the regions (R1=${R1}, R2=${R2}) ...\n"
printf "modify if necessary and save with the same name: \`${TMP_REG}'\n"
cp -fv ${TMP_REG} ${TMP_REG%.reg}_orig.reg
-ds9 ${EVT_E} -regions ${TMP_REG} -cmap sls -bin factor 4
+ds9 ${EVT_E} -regions ${TMP_REG} -cmap he -bin factor 4
read -p "> Whether the region exceeds ccd edge?(No/yes/modified) " WR_ANS
case "${WR_ANS}" in
[yY]*)
@@ -220,8 +249,8 @@ esac
punlearn dmextract
dmextract infile="${EVT_E}[bin sky=@${TMP_REG}]" outfile="${TMP_S}" exp=${EXPMAP} opt=generic clobber=yes
punlearn dmlist
-S1=`dmlist "${TMP_S}[cols SUR_FLUX]" opt="data,clean" | grep -v '#' | sed -n -e 's/\ *//' -e '1p'`
-S2=`dmlist "${TMP_S}[cols SUR_FLUX]" opt="data,clean" | grep -v '#' | sed -n -e 's/\ *//' -e '2p'`
+S1=`dmlist "${TMP_S}[cols SUR_FLUX]" opt="data,clean" | \grep -v '#' | sed -n -e 's/\ *//' -e '1p'`
+S2=`dmlist "${TMP_S}[cols SUR_FLUX]" opt="data,clean" | \grep -v '#' | sed -n -e 's/\ *//' -e '2p'`
CSB=`echo "${S1} ${S2}" | awk '{ print $1/$2/100 }'`
## back to original spc/profile directory
diff --git a/scripts/ciao_calc_ct.sh b/scripts/ciao_calc_ct.sh
index 5f76ed5..58bf566 100755
--- a/scripts/ciao_calc_ct.sh
+++ b/scripts/ciao_calc_ct.sh
@@ -3,30 +3,34 @@
unalias -a
export LC_COLLATE=C
###########################################################
-## based on `ciao_r500avgt' ##
-## for calculating the `cooling time ' ##
+## based on `ciao_r500avgt.sh' ##
+## for calculating the `cooling time' ##
## within (0.-0.048 r500) region ##
## ##
-## Junhua Gu ##
-## August 22, 2012 ##
+## Author: Junhua GU ##
+## Created: 2012/08/22 ##
## ##
-## LIweitiaNux ##
+## Modified by: Weitian LI ##
## 2013/04/28 ##
###########################################################
-
-###########################################################
-## ChangeLogs
-## v1.1, 2014/06/18
-## 'cooling_time2.sh' -> 'ciao_calc_ct.sh'
-## v1.2, 2015/05/27, Weitian LI
+##
+VERSION="v2.0"
+UPDATE="2015/06/03"
+##
+## Changelogs:
+## v2.0, 2015/06/03, Aaron LI
+## * Copy needed pfiles to current working directory, and
+## set environment variable $PFILES to use these first.
+## * Replace 'grep' with '\grep', 'ls' with '\ls'
+## * replaced 'grppha' with 'dmgroup' to group spectra
+## (dmgroup will add history to fits file, while grppha NOT)
+## * ds9 colormap changed from 'sls' to 'he'
+## v1.2, 2015/05/27, Aaron LI
## update 'DFT_ARF' & 'DFT_RMF' to find '*.arf' & '*.rmf' files
## (specextract only use .arf & .rmf extensions since revision 2014-12)
-###########################################################
-
-## about, used in `usage' {{{
-VERSION="v1.2"
-UPDATE="2015-05-27"
-## about }}}
+## v1.1, 2014/06/18
+## 'cooling_time2.sh' -> 'ciao_calc_ct.sh'
+##
## error code {{{
ERR_USG=1
@@ -49,17 +53,15 @@ ERR_UNI=61
case "$1" in
-[hH]*|--[hH]*)
printf "usage:\n"
- printf " `basename $0` basedir=<repro_dir> evt=<evt2_clean> r500=<r500_kpc> regin=<input_reg> regout=<output_reg> bkgd=<blank_evt | lbkg_reg | bkg_spec> nh=<nH> z=<redshift> arf=<arf_file> rmf=<rmf_file> [ grpcmd=<grppha_cmd> log=<log_file> ]\n"
+ printf " `basename $0` basedir=<repro_dir> evt=<evt2_clean> r500=<r500_kpc> regin=<input_reg> regout=<output_reg> bkgd=<blank_evt | lbkg_reg | bkg_spec> nh=<nH> z=<redshift> arf=<arf_file> rmf=<rmf_file> [ grouptype=<NUM_CTS|BIN> grouptypeval=<number> binspec=<binspec> log=<log_file> ]\n"
printf "\nversion:\n"
- printf "${VERSION}, ${UPDATE}\n"
+ printf " ${VERSION}, ${UPDATED}\n"
exit ${ERR_USG}
;;
esac
## usage, help }}}
## comology calculator {{{
-## XXX: MODIFY THIS TO YOUR OWN CASE
-## and make sure this `calc' is executable
## NOTES: use `$HOME' instead of `~' in path
BASE_PATH=`dirname $0`
COSCALC="`which cosmo_calc calc_distance 2>/dev/null | head -n 1`"
@@ -72,9 +74,9 @@ fi
## default parameters {{{
# default `event file' which used to match `blanksky' files
#DFT_EVT="_NOT_EXIST_"
-DFT_EVT="`ls evt2*_clean.fits 2> /dev/null`"
+DFT_EVT="`\ls evt2*_clean.fits 2> /dev/null`"
# default `bkgd', use `bkgcorr_blanksky*' corrected bkg spectrum
-DFT_BKGD="`ls bkgcorr_blanksky_*.pi 2> /dev/null`"
+DFT_BKGD="`\ls bkgcorr_blanksky_*.pi 2> /dev/null`"
# default basedir
DFT_BASEDIR="../.."
# default info_json pattern
@@ -85,14 +87,14 @@ DFT_REG_IN="rspec.reg"
# default output region file (0.1-0.5 r500 region)
DFT_REG_OUT="cooling.reg"
# default ARF/RMF, the one of the outmost region
-DFT_ARF="`ls r1_*.warf r1_*.arf 2> /dev/null`"
-DFT_RMF="`ls r1_*.wrmf r1_*.rmf 2> /dev/null`"
+DFT_ARF="`\ls r1_*.warf r1_*.arf 2> /dev/null`"
+DFT_RMF="`\ls r1_*.wrmf r1_*.rmf 2> /dev/null`"
-# default `group command' for `grppha'
-#DFT_GRP_CMD="group 1 128 2 129 256 4 257 512 8 513 1024 16"
-DFT_GRP_CMD="group min 20"
-# default `log file'
-DFT_LOGFILE="cooling_`date '+%Y%m%d'`.log"
+# default parameters for 'dmgroup'
+DFT_GROUPTYPE="NUM_CTS"
+DFT_GROUPTYPEVAL="20"
+#DFT_GROUPTYPE="BIN"
+DFT_BINSPEC="1:128:2,129:256:4,257:512:8,513:1024:16"
INNER=0.0
OUTER=0.048
@@ -102,6 +104,9 @@ XSPEC_SCRIPT="xspec_cooling.xcm"
# deproj xspec script, generated by `deproj_spectra'
# from which get `nh' and `redshift'
XSPEC_DEPROJ="xspec_deproj.xcm"
+
+# default `log file'
+DFT_LOGFILE="cooling_`date '+%Y%m%d'`.log"
## default parameters }}}
## functions {{{
@@ -126,7 +131,7 @@ CH_LOW=651
CH_HI=822
pb_flux() {
punlearn dmstat
- COUNTS=`dmstat "$1[channel=${CH_LOW}:${CH_HI}][cols COUNTS]" | grep -i 'sum:' | awk '{ print $2 }'`
+ COUNTS=`dmstat "$1[channel=${CH_LOW}:${CH_HI}][cols COUNTS]" | \grep -i 'sum:' | awk '{ print $2 }'`
punlearn dmkeypar
EXPTIME=`dmkeypar $1 EXPOSURE echo=yes`
BACK=`dmkeypar $1 BACKSCAL echo=yes`
@@ -179,7 +184,7 @@ if [ ! -r "${XSPEC_DEPROJ}" ]; then
read -p "> value of redshift: " REDSHIFT
else
# get `nh' and `redshift' from xspec script
- LN=`grep -n 'projct\*wabs\*apec' ${XSPEC_DEPROJ} | tail -n 1 | cut -d':' -f1`
+ LN=`\grep -n 'projct\*wabs\*apec' ${XSPEC_DEPROJ} | tail -n 1 | cut -d':' -f1`
# calc the line number of which contains `nh'
LN_NH=`expr ${LN} + 4`
NH_XCM=`head -n ${LN_NH} ${XSPEC_DEPROJ} | tail -n 1 | awk '{ print $1 }'`
@@ -215,8 +220,8 @@ fi
# json file
if [ ! -z "${json}" ] && [ -r "${BASEDIR}/${json}" ]; then
JSON_FILE="${BASEDIR}/${json}"
-elif [ `ls ${BASEDIR}/${DFT_JSON_PAT} 2> /dev/null | wc -l` -eq 1 ]; then
- JSON_FILE=`ls ${BASEDIR}/${DFT_JSON_PAT}`
+elif [ `\ls ${BASEDIR}/${DFT_JSON_PAT} 2> /dev/null | wc -l` -eq 1 ]; then
+ JSON_FILE=`\ls ${BASEDIR}/${DFT_JSON_PAT}`
else
read -p "> JSON_file: " JSON_FILE
if [ ! -r "${JSON_FILE}" ]; then
@@ -227,7 +232,7 @@ fi
printf "## use json_file: \`${JSON_FILE}'\n" | ${TOLOG}
# process `r500' {{{
-R500_RAW=`grep '"R500.*kpc' ${JSON_FILE} | sed 's/.*"R500.*":\ //' | sed 's/\ *,$//'`
+R500_RAW=`\grep '"R500.*kpc' ${JSON_FILE} | sed 's/.*"R500.*":\ //' | sed 's/\ *,$//'`
if [ ! -z "${r500}" ]; then
R500_RAW=${r500}
fi
@@ -247,7 +252,7 @@ case "${R500_UNI}" in
;;
*)
printf "## units in \`kpc', convert to \`Chandra pixel'\n" | ${TOLOG}
- KPC_PER_PIX=`${COSCALC} ${REDSHIFT} | grep 'kpc.*pix' | tr -d 'a-zA-Z_#=(),:/ '`
+ KPC_PER_PIX=`${COSCALC} ${REDSHIFT} | \grep 'kpc.*pix' | tr -d 'a-zA-Z_#=(),:/ '`
# convert scientific notation for `bc'
KPC_PER_PIX_B=`echo ${KPC_PER_PIX} | sed 's/[eE]/\*10\^/' | sed 's/+//'`
printf "## calculated \`kpc/pixel': ${KPC_PER_PIX_B}\n"
@@ -299,7 +304,7 @@ printf "## set output regfile (0.1-0.5 r500 region): \`${REG_OUT}'\n" | ${TOLOG}
# get center position from `regin'
# only consider `pie' or `annulus'-shaped region
-TMP_REG=`grep -iE '(pie|annulus)' ${REG_IN} | head -n 1`
+TMP_REG=`\grep -iE '(pie|annulus)' ${REG_IN} | head -n 1`
XC=`echo ${TMP_REG} | tr -d ' ' | awk -F'[(),]' '{ print $2 }'`
YC=`echo ${TMP_REG} | tr -d ' ' | awk -F'[(),]' '{ print $3 }'`
printf "## get center coord: (${XC},${YC})\n" | ${TOLOG}
@@ -321,7 +326,7 @@ printf "## use bkgd: \`${BKGD}'\n" | ${TOLOG}
# determine bkg type: blanksky, lbkg_reg, bkg_spec ?
# according to file type first: text / FITS
# if FITS, then get values of `HDUCLAS1' and `OBJECT'
-if file -bL ${BKGD} | grep -qi 'text'; then
+if file -bL ${BKGD} | \grep -qi 'text'; then
printf "## given \`${BKGD}' is a \`text file'\n"
printf "## use it as local bkg region file\n"
printf "## use *LOCAL BKG SPEC*\n" | ${TOLOG}
@@ -329,9 +334,10 @@ if file -bL ${BKGD} | grep -qi 'text'; then
USE_LBKG_REG=YES
USE_BLANKSKY=NO
USE_BKG_SPEC=NO
-elif file -bL ${BKGD} | grep -qi 'FITS'; then
+elif file -bL ${BKGD} | \grep -qi 'FITS'; then
printf "## given \`${BKGD}' is a \`FITS file'\n"
# get FITS header keyword
+ punlearn dmkeypar
HDUCLAS1=`dmkeypar ${BKGD} HDUCLAS1 echo=yes`
if [ "${HDUCLAS1}" = "EVENTS" ]; then
# event file
@@ -400,21 +406,49 @@ fi
printf "## use RMF: \`${RMF}'\n" | ${TOLOG}
# arf & rmf }}}
-# check given `grpcmd'
-if [ ! -z "${grpcmd}" ]; then
- GRP_CMD="${grpcmd}"
+# check given dmgroup parameters: grouptype, grouptypeval, binspec
+if [ -z "${grouptype}" ]; then
+ GROUPTYPE="${DFT_GROUPTYPE}"
+elif [ "x${grouptype}" = "xNUM_CTS" ] || [ "x${grouptype}" = "xBIN" ]; then
+ GROUPTYPE="${grouptype}"
+else
+ printf "ERROR: given grouptype \`${grouptype}' invalid.\n"
+ exit ${ERR_GRPTYPE}
+fi
+printf "## use grouptype: \`${GROUPTYPE}'\n" | ${TOLOG}
+if [ ! -z "${grouptypeval}" ]; then
+ GROUPTYPEVAL="${grouptypeval}"
else
- GRP_CMD="${DFT_GRP_CMD}"
+ GROUPTYPEVAL="${DFT_GROUPTYPEVAL}"
fi
-printf "## use grppha cmd: \`${GRP_CMD}'\n" | ${TOLOG}
+printf "## use grouptypeval: \`${GROUPTYPEVAL}'\n" | ${TOLOG}
+if [ ! -z "${binspec}" ]; then
+ BINSPEC="${binspec}"
+else
+ BINSPEC="${DFT_BINSPEC}"
+fi
+printf "## use binspec: \`${BINSPEC}'\n" | ${TOLOG}
## parameters }}}
-##################################################
-#### main
-## D_A
-#D_A_CM=`${COSCALC} ${REDSHIFT} | grep '^d_a_cm' | awk '{ print $2 }'`
-D_A_CM=`${COSCALC} ${REDSHIFT} | grep -i 'd_a.*cm' | awk -F'=' '{ print $2 }' | awk '{ print $1 }'`
+## prepare parameter files (pfiles) {{{
+CIAO_TOOLS="dmstat dmkeypar dmhedit dmextract dmlist dmgroup"
+
+# Copy necessary pfiles for localized usage
+for tool in ${CIAO_TOOLS}; do
+ pfile=`paccess ${tool}`
+ [ -n "${pfile}" ] && punlearn ${tool} && cp -Lvf ${pfile} .
+done
+
+# Modify environment variable 'PFILES' to use local pfiles first
+export PFILES="./:${PFILES}"
+## pfiles }}}
+
+### main ###
+
+## D_A {{{
+D_A_CM=`${COSCALC} ${REDSHIFT} | \grep -i 'd_a.*cm' | awk -F'=' '{ print $2 }' | awk '{ print $1 }'`
printf "D_A_CM(${REDSHIFT})= ${D_A_CM}\n"
+## D_A }}}
## region related {{{
## generate the needed region file
@@ -427,11 +461,11 @@ _EOF_
## open the evt file to verify or modify
printf "## check the generated pie region ...\n"
printf "## if modified, save with the same name \`${REG_OUT}' (overwrite)\n"
-ds9 ${EVT} -regions ${REG_OUT} -cmap sls -bin factor 4
+ds9 ${EVT} -regions ${REG_OUT} -cmap he -bin factor 4
## check the (modified) region (pie region end angle)
printf "check the above region (for pie region end angle) ...\n"
-INVALID=`grep -i 'pie' ${REG_OUT} | awk -F'[,()]' '$7 > 360'`
+INVALID=`\grep -i 'pie' ${REG_OUT} | awk -F'[,()]' '$7 > 360'`
if [ "x${INVALID}" != "x" ]; then
printf "*** WARNING: there are pie regions' END_ANGLE > 360\n" | ${TOLOG}
printf "*** will to fix ...\n"
@@ -452,7 +486,7 @@ fi
## generate spectrum {{{
# check counts
punlearn dmlist
-CNT_RC=`dmlist infile="${EVT}[sky=region(${REG_OUT})][energy=700:7000]" opt=block | grep 'EVENTS' | awk '{ print $8 }'`
+CNT_RC=`dmlist infile="${EVT}[sky=region(${REG_OUT})][energy=700:7000]" opt=block | \grep 'EVENTS' | awk '{ print $8 }'`
if [ ${CNT_RC} -lt 500 ]; then
F_WC=true
WC="LOW_COUNTS"
@@ -468,8 +502,11 @@ dmextract infile="${EVT}[sky=region(${REG_OUT})][bin PI]" \
outfile="${AVGT_SPEC}" wmap="[bin det=8]" clobber=yes
# group spectrum
printf "group object spectrum ...\n"
-grppha infile="${AVGT_SPEC}" outfile="${AVGT_SPEC_GRP}" \
- comm="${GRP_CMD} & exit" clobber=yes > /dev/null
+punlearn dmgroup
+dmgroup infile="${AVGT_SPEC}" outfile="${AVGT_SPEC_GRP}" \
+ grouptype="${GROUPTYPE}" grouptypeval=${GROUPTYPEVAL} \
+ binspec="${BINSPEC}" xcolumn="CHANNEL" ycolumn="COUNTS" \
+ clobber=yes
# background
printf "generate the background spectrum ...\n"
@@ -585,9 +622,9 @@ else
printf "invoking XSPEC to calculate cooling time ...\n"
xspec - ${XSPEC_SCRIPT} | tee ${CT_RES}
- OBS_ID=`grep '"Obs.*ID' ${JSON_FILE} | awk -F':' '{ print $2 }' | tr -d ' ,'`
- OBJ_NAME=`grep '"Source\ Name' ${JSON_FILE} | awk -F':' '{ print $2 }' | sed -e 's/\ *"//' -e 's/"\ *,$//'`
- CT=`grep -i '^Cooling_time' ${CT_RES} | awk '{ print $2 }'`
+ OBS_ID=`\grep '"Obs.*ID' ${JSON_FILE} | awk -F':' '{ print $2 }' | tr -d ' ,'`
+ OBJ_NAME=`\grep '"Source\ Name' ${JSON_FILE} | awk -F':' '{ print $2 }' | sed -e 's/\ *"//' -e 's/"\ *,$//'`
+ CT=`\grep -i '^Cooling_time' ${CT_RES} | awk '{ print $2 }'`
printf "\n" | tee -a ${CT_RES}
printf "# OBS_ID,OBJ_NAME,CT_gyr\n" | tee -a ${CT_RES}
diff --git a/scripts/ciao_calc_ct_csb.sh b/scripts/ciao_calc_ct_csb.sh
index bee6316..04ef343 100755
--- a/scripts/ciao_calc_ct_csb.sh
+++ b/scripts/ciao_calc_ct_csb.sh
@@ -1,16 +1,16 @@
#!/bin/sh
-#
-###########################################################
-## Invoke 'ciao_calc_ct.sh' and 'ciao_calc_csb.sh' ##
-## to calculate cooling time and Csb value. ##
-## ##
-## Weitian LI ##
-## 2014/06/18 ##
-## ##
-## ChangeLog: ##
-## 1.1, 2014/12/11, Weitian LI ##
-## test ${CT_RES} before read ##
-###########################################################
+##
+##
+## Invoke 'ciao_calc_ct.sh' and 'ciao_calc_csb.sh'
+## to calculate cooling time and Csb value.
+##
+## Weitian LI
+## 2014/06/18
+##
+## Changelogs:
+## v1.1, 2014/12/11, Weitian LI
+## * test ${CT_RES} before read
+##
BASE_PATH=`dirname $0`
SCRIPT_CT="${BASE_PATH}/ciao_calc_ct.sh"
diff --git a/scripts/ciao_check_offset.sh b/scripts/ciao_check_offset.sh
index af9bae1..c57bbaf 100755
--- a/scripts/ciao_check_offset.sh
+++ b/scripts/ciao_check_offset.sh
@@ -1,5 +1,6 @@
#!/bin/sh
#
+unalias -a
export LC_COLLATE=C
###########################################################
## based on `ciao_update_xcentroid.sh' ##
@@ -10,11 +11,16 @@ export LC_COLLATE=C
## Weitian LI <liweitianux@gmail.com> ##
## 2014/06/25 ##
###########################################################
-
-## about, used in `usage' {{{
-VERSION="v1.0"
-UPDATE="2014-06-25"
-## about }}}
+##
+VERSION="v2.0"
+UPDATED="2015/06/03"
+##
+## Changelogs:
+## v2.0, 2015/06/03, Aaron LI
+## * Copy needed pfiles to current working directory, and
+## set environment variable $PFILES to use these first.
+## * Replace 'grep' with '\grep', 'ls' with '\ls'
+##
## error code {{{
ERR_USG=1
@@ -40,7 +46,7 @@ case "$1" in
printf "usage:\n"
printf " `basename $0` evt=<evt_file> reg=<sbp_reg> basedir=<base_dir>\n"
printf "\nversion:\n"
- printf "${VERSION}, ${UPDATE}\n"
+ printf " ${VERSION}, ${UPDATED}\n"
exit ${ERR_USG}
;;
esac
@@ -53,7 +59,7 @@ OFF_CRIT_I="0:7:0" # 0deg 7min 0sec
# default `event file' which used to match `blanksky' files
#DFT_EVT="_NOT_EXIST_"
-DFT_EVT="`ls evt2*_clean.fits 2> /dev/null`"
+DFT_EVT="`\ls evt2*_clean.fits 2> /dev/null`"
# default dir which contains `asols, asol.lis, ...' files
# DFT_BASEDIR="_NOT_EXIST_"
DFT_BASEDIR=".."
@@ -85,8 +91,8 @@ getopt_keyval() {
ddmmss2deg() {
# convert 'dd:mm:ss' to 'deg'
echo "$1" | awk -F':' '
- function abs(x) { return ((x<0.0)? (-x) : x) }
- function sign(x) { return ((x<0.0)? (-1.0) : 1.0) }
+ function abs(x) { return ((x<0.0) ? (-x) : x) }
+ function sign(x) { return ((x<0.0) ? (-1.0) : 1.0) }
{
value = abs($1) + ($2)/60.0 + ($3)/3600.0;
printf("%.8f", sign($1)*value);
@@ -96,7 +102,7 @@ ddmmss2deg() {
deg2ddmmss() {
# convert 'deg' to 'dd:mm:ss'
echo "$1" | awk '
- function abs(x) { return ((x<0.0)? (-x) : x) }
+ function abs(x) { return ((x<0.0) ? (-x) : x) }
{
deg = $1;
dd = int(deg);
@@ -166,14 +172,27 @@ BASEDIR=`echo ${BASEDIR} | sed 's/\/*$//'`
printf "## use basedir: \`${BASEDIR}'\n" | ${TOLOG}
## parameters }}}
+## prepare parameter files (pfiles) {{{
+CIAO_TOOLS="dmkeypar dmcoords"
+
+# Copy necessary pfiles for localized usage
+for tool in ${CIAO_TOOLS}; do
+ pfile=`paccess ${tool}`
+ [ -n "${pfile}" ] && punlearn ${tool} && cp -Lvf ${pfile} .
+done
+
+# Modify environment variable 'PFILES' to use local pfiles first
+export PFILES="./:${PFILES}"
+## pfiles }}}
+
## main process {{{
# asolis
-ASOLIS=`( cd ${BASEDIR} && ls ${DFT_ASOLIS_PAT} 2> /dev/null )`
+ASOLIS=`( cd ${BASEDIR} && \ls ${DFT_ASOLIS_PAT} 2> /dev/null )`
# get (x,y) from sbp region
printf "get (x,y) from ${SBP_REG}\n"
-X=`grep -iE '(pie|annulus)' ${SBP_REG} | head -n 1 | awk -F',' '{ print $1 }' | tr -d 'a-zA-Z() '`
-Y=`grep -iE '(pie|annulus)' ${SBP_REG} | head -n 1 | awk -F',' '{ print $2 }' | tr -d 'a-zA-Z() '`
+X=`\grep -iE '(pie|annulus)' ${SBP_REG} | head -n 1 | awk -F',' '{ print $1 }' | tr -d 'a-zA-Z() '`
+Y=`\grep -iE '(pie|annulus)' ${SBP_REG} | head -n 1 | awk -F',' '{ print $2 }' | tr -d 'a-zA-Z() '`
# dmcoords to convert (x,y) to (ra,dec)
printf "\`dmcoords' to convert (x,y) to (ra,dec) ...\n"
@@ -190,11 +209,11 @@ DEC_PNT=`dmkeypar infile="${EVT}" keyword="DEC_PNT" echo=yes`
## determine ACIS type {{{
punlearn dmkeypar
DETNAM=`dmkeypar ${EVT} DETNAM echo=yes`
-if echo ${DETNAM} | grep -q 'ACIS-0123'; then
+if echo ${DETNAM} | \grep -q 'ACIS-0123'; then
#printf "## \`DETNAM' (${DETNAM}) has chips 0123\n"
#printf "## ACIS-I\n"
ACIS_TYPE="ACIS-I"
-elif echo ${DETNAM} | grep -q 'ACIS-[0-6]*7'; then
+elif echo ${DETNAM} | \grep -q 'ACIS-[0-6]*7'; then
#printf "## \`DETNAM' (${DETNAM}) has chip 7\n"
#printf "## ACIS-S\n"
ACIS_TYPE="ACIS-S"
diff --git a/scripts/ciao_deproj_spectra.sh b/scripts/ciao_deproj_spectra.sh
index c869d8a..1ec5ed6 100755
--- a/scripts/ciao_deproj_spectra.sh
+++ b/scripts/ciao_deproj_spectra.sh
@@ -37,10 +37,14 @@
##
AUTHOR="Weitian LI <liweitianux@gmail.com>"
CREATED="2012/07/24"
-UPDATED="2015/02/12"
-VERSION="v9.0"
+UPDATED="2015/06/03"
+VERSION="v10.0"
##
## ChangeLogs:
+## v10.0, 2015/06/03, Aaron LI
+## * Copy needed pfiles to current working directory, and
+## set environment variable $PFILES to use these first.
+## * Replace 'ls' with '\ls'
## v9.0, 2015/02/12, Weitian LI
## * updated parameter settings for 'specextract' to match
## specextract revision 2013-12.
@@ -98,7 +102,7 @@ esac
## default parameters {{{
# default `event file' which used to match `blanksky' files
#DFT_EVT="_NOT_EXIST_"
-DFT_EVT="`ls evt2*_clean.fits`"
+DFT_EVT="`\ls evt2*_clean.fits`"
# default `radial region file'
#DFT_REG_IN="_NOT_EXIST_"
DFT_REG_IN="rspec.reg"
@@ -371,28 +375,28 @@ fi
# check files in `basedir'
printf "check needed files in basedir \`${BASEDIR}' ...\n"
# check asolis files
-ASOLIS=`ls -1 ${BASEDIR}/${DFT_ASOLIS_PAT} | head -n 1`
+ASOLIS=`\ls -1 ${BASEDIR}/${DFT_ASOLIS_PAT} | head -n 1`
if [ -z "${ASOLIS}" ]; then
printf "ERROR: cannot find \"${DFT_ASOLIS_PAT}\" in dir \`${BASEDIR}'\n"
exit ${ERR_ASOL}
fi
printf "## use asolis: \`${ASOLIS}'\n" | ${TOLOG}
# check badpixel file
-BPIX=`ls -1 ${BASEDIR}/${DFT_BPIX_PAT} | head -n 1`
+BPIX=`\ls -1 ${BASEDIR}/${DFT_BPIX_PAT} | head -n 1`
if [ -z "${BPIX}" ]; then
printf "ERROR: cannot find \"${DFT_BPIX_PAT}\" in dir \`${BASEDIR}'\n"
exit ${ERR_BPIX}
fi
printf "## use badpixel: \`${BPIX}'\n" | ${TOLOG}
# check pbk file
-PBK=`ls -1 ${BASEDIR}/${DFT_PBK_PAT} | head -n 1`
+PBK=`\ls -1 ${BASEDIR}/${DFT_PBK_PAT} | head -n 1`
if [ -z "${PBK}" ]; then
printf "ERROR: cannot find \"${DFT_PBK_PAT}\" in dir \`${BASEDIR}'\n"
exit ${ERR_PBK}
fi
printf "## use pbk: \`${PBK}'\n" | ${TOLOG}
# check msk file
-MSK=`ls -1 ${BASEDIR}/${DFT_MSK_PAT} | head -n 1`
+MSK=`\ls -1 ${BASEDIR}/${DFT_MSK_PAT} | head -n 1`
if [ -z "${MSK}" ]; then
printf "ERROR: cannot find \"${DFT_MSK_PAT}\" in dir \`${BASEDIR}'\n"
exit ${ERR_MSK}
@@ -400,6 +404,19 @@ fi
printf "## use msk: \`${MSK}'\n" | ${TOLOG}
## check files }}}
+## prepare parameter files (pfiles) {{{
+CIAO_TOOLS="dmstat dmkeypar dmhedit specextract dmextract dmgroup"
+
+# Copy necessary pfiles for localized usage
+for tool in ${CIAO_TOOLS}; do
+ pfile=`paccess ${tool}`
+ [ -n "${pfile}" ] && punlearn ${tool} && cp -Lvf ${pfile} .
+done
+
+# Modify environment variable 'PFILES' to use local pfiles first
+export PFILES="./:${PFILES}"
+## pfiles }}}
+
## process local background {{{
if [ "${USE_LBKG_REG}" = "YES" ]; then
BKG_EVT=${EVT}
diff --git a/scripts/ciao_expcorr.sh b/scripts/ciao_expcorr.sh
index c1d7905..ecb2806 100755
--- a/scripts/ciao_expcorr.sh
+++ b/scripts/ciao_expcorr.sh
@@ -1,10 +1,13 @@
#!/bin/sh
##
-## make `image' from `evt file'
-## make `spectral weight' using `make_instmap_weights'
-## use `fluximage' to generating `exposure map'
-## make `exposure-corrected' image
-## and extract `surface brighness profile'
+## This script performs exposure correction to images, and
+## finally produce the exposure-corrected image.
+##
+## * make `image' from `evt file'
+## * make `spectral weight' using `make_instmap_weights'
+## * use `fluximage' to generating `exposure map'
+## * make `exposure-corrected' image
+## * and extract `surface brightness profile'
##
## NOTES:
## only ACIS-I (chip: 0-3) and ACIS-S (chip: 7) supported
@@ -12,16 +15,23 @@
##
## Weitian LI
## 2012/08/16
-UPDATED="2014/10/30"
+##
+VERSION="v3.0"
+UPDATED="2015/06/03"
##
## ChangeLogs:
+## v3.0, 2015/06/03, Aaron LI
+## * Changed to use 'fluximage' by default, and fallback to 'merge_all'
+## * Copy needed pfiles to current working directory, and
+## set environment variable $PFILES to use these first.
+## * Replaced 'grep' with '\grep', 'ls' with '\ls'
## v2.1, 2014/10/30, Weitian LI
## Add 'aspect' parameter for 'skyfov' to fix the 'FoV shift bug'
## v2.0, 2014/07/29, Weitian LI
## `merge_all' deprecated, use `fluximage' if possible
-## v1.2, 2012-08-21, LIweitiaNux
+## v1.2, 2012/08/21, Weitian LI
## set `ardlib' before process `merge_all'
-## v1.1, 2012-08-21, LIweitiaNux
+## v1.1, 2012/08/21, Weitian LI
## fix a bug with `sed'
##
@@ -43,6 +53,7 @@ ERR_SPEC=32
ERR_DET=41
ERR_ENG=42
ERR_CIAO=100
+ERR_TOOL=110
## error code }}}
## usage, help {{{
@@ -50,8 +61,8 @@ case "$1" in
-[hH]*|--[hH]*)
printf "usage:\n"
printf " `basename $0` evt=<evt_file> energy=<e_start:e_end:e_width> basedir=<base_dir> nh=<nH> z=<redshift> temp=<avg_temperature> abund=<avg_abund> [ logfile=<log_file> ]\n"
- printf "\nupdated:\n"
- printf "${UPDATED}\n"
+ printf "\nversion:\n"
+ printf " ${VERSION}, ${UPDATED}\n"
exit ${ERR_USG}
;;
esac
@@ -60,7 +71,7 @@ esac
## default parameters {{{
# default `event file' which used to match `blanksky' files
#DFT_EVT="_NOT_EXIST_"
-DFT_EVT="`ls evt2*_clean.fits 2> /dev/null`"
+DFT_EVT="`\ls evt2*_clean.fits 2> /dev/null`"
# default dir which contains `asols, asol.lis, ...' files
# DFT_BASEDIR="_NOT_EXIST_"
DFT_BASEDIR=".."
@@ -225,23 +236,36 @@ BASEDIR=`echo ${BASEDIR} | sed 's/\/*$//'`
printf "## use basedir: \`${BASEDIR}'\n" | ${TOLOG}
## parameters }}}
+## prepare parameter files (pfiles) {{{
+CIAO_TOOLS="dmkeypar dmcopy dmhedit dmimgcalc ardlib make_instmap_weights skyfov get_sky_limits fluximage merge_all"
+
+# Copy necessary pfiles for localized usage
+for tool in ${CIAO_TOOLS}; do
+ pfile=`paccess ${tool}`
+ [ -n "${pfile}" ] && punlearn ${tool} && cp -Lvf ${pfile} .
+done
+
+# Modify environment variable 'PFILES' to use local pfiles first
+export PFILES="./:${PFILES}"
+## pfiles }}}
+
## check files in `basedir' {{{
# check asol files
-ASOLIS=`ls -1 ${BASEDIR}/${DFT_ASOLIS_PAT} | head -n 1`
+ASOLIS=`\ls -1 ${BASEDIR}/${DFT_ASOLIS_PAT} | head -n 1`
if [ -z "${ASOLIS}" ]; then
printf "ERROR: cannot find \"${DFT_ASOLIS_PAT}\" in dir \`${BASEDIR}'\n"
exit ${ERR_ASOL}
fi
printf "## use asolis: \`${ASOLIS}'\n"
# check badpixel file
-BPIX=`ls -1 ${BASEDIR}/${DFT_BPIX_PAT} | head -n 1`
+BPIX=`\ls -1 ${BASEDIR}/${DFT_BPIX_PAT} | head -n 1`
if [ -z "${BPIX}" ]; then
printf "ERROR: cannot find \"${DFT_BPIX_PAT}\" in dir \`${BASEDIR}'\n"
exit ${ERR_BPIX}
fi
printf "## use badpixel: \`${BPIX}'\n" | ${TOLOG}
# check msk file
-MSK=`ls -1 ${BASEDIR}/${DFT_MSK_PAT} | head -n 1`
+MSK=`\ls -1 ${BASEDIR}/${DFT_MSK_PAT} | head -n 1`
if [ -z "${MSK}" ]; then
printf "ERROR: cannot find \"${DFT_MSK_PAT}\" in dir \`${BASEDIR}'\n"
exit ${ERR_MSK}
@@ -253,14 +277,14 @@ printf "## use msk file: \`${MSK}'\n" | ${TOLOG}
# consistent with `ciao_procevt'
punlearn dmkeypar
DETNAM=`dmkeypar ${EVT} DETNAM echo=yes`
-if echo ${DETNAM} | grep -q 'ACIS-0123'; then
+if echo ${DETNAM} | \grep -q 'ACIS-0123'; then
printf "## \`DETNAM' (${DETNAM}) has chips 0123\n"
printf "## ACIS-I\n"
ACIS_TYPE="ACIS-I"
CCD="0:3"
NEW_DETNAM="ACIS-0123"
ROOTNAME="c0-3_e${E_START}-${E_END}"
-elif echo ${DETNAM} | grep -q 'ACIS-[0-6]*7'; then
+elif echo ${DETNAM} | \grep -q 'ACIS-[0-6]*7'; then
printf "## \`DETNAM' (${DETNAM}) has chip 7\n"
printf "## ACIS-S\n"
ACIS_TYPE="ACIS-S"
@@ -273,7 +297,6 @@ else
fi
## ACIS type }}}
-
## main process {{{
## set `ardlib' at first
printf "set \`ardlib' first ...\n"
@@ -330,14 +353,25 @@ printf "## get \`xygrid': \`${XYGRID}'\n" | ${TOLOG}
EXPMAP="expmap_${ROOTNAME}.fits"
IMG_EXPCORR="img_expcorr_${ROOTNAME}.fits"
-if `which merge_all >/dev/null 2>&1`; then
- # merge_all available
- printf "merge_all ...\n"
- ## set `ardlib' again to make sure the matched bpix file specified
- printf "set \`ardlib' again for \`merge_all' ...\n"
- punlearn ardlib
- acis_set_ardlib badpixfile="${BPIX}"
-
+if `which fluximage >/dev/null 2>&1`; then
+ ## 'fluximage' is available, use it by default
+ ## use 'fluximage' to generate `exposure map' and apply exposure correction
+ printf "use fluximage to generate expmap and apply correction ...\n"
+ punlearn fluximage
+ fluximage infile="${EVT_E}" outroot="${ROOTNAME}" \
+ binsize=1 bands="${SPEC_WGT}" xygrid="${XYGRID}" \
+ asol="@${ASOLIS}" badpixfile="${BPIX}" \
+ maskfile="${MSK}" clobber=yes
+ ## make symbolic links
+ # clipped counts image
+ ln -svf ${ROOTNAME}*band*thresh.img ${IMG_ORIG%.fits}_thresh.fits
+ # clipped exposure map
+ ln -svf ${ROOTNAME}*band*thresh.expmap ${EXPMAP}
+ # exposure-corrected image
+ ln -svf ${ROOTNAME}*band*flux.img ${IMG_EXPCORR}
+elif `which merge_all >/dev/null 2>&1`; then
+ # 'merge_all' is available, fallback to this
+ printf "fallback to merge_all ...\n"
## XXX: `merge_all' needs `asol files' in working directory
printf "link asol files into currect dir (\`merge_all' needed) ...\n"
for f in `cat ${ASOLIS}`; do
@@ -359,21 +393,9 @@ if `which merge_all >/dev/null 2>&1`; then
dmimgcalc infile="${IMG_ORIG}" infile2="${EXPMAP}" \
outfile="${IMG_EXPCORR}" operation=div clobber=yes
else
- ## `merge_all' deprecated and not available
- ## use 'fluximage' to generate `exposure map' and apply exposure correction
- printf "fluximage ...\n"
- punlearn fluximage
- fluximage infile="${EVT_E}" outroot="${ROOTNAME}" \
- binsize=1 bands="${SPEC_WGT}" xygrid="${XYGRID}" \
- asol="@${ASOLIS}" badpixfile="${BPIX}" \
- maskfile="${MSK}" clobber=yes
- ## make symbolic links
- # clipped counts image
- ln -svf ${ROOTNAME}*band*thresh.img ${IMG_ORIG%.fits}_thresh.fits
- # clipped exposure map
- ln -svf ${ROOTNAME}*band*thresh.expmap ${EXPMAP}
- # exposure-corrected image
- ln -svf ${ROOTNAME}*band*flux.img ${IMG_EXPCORR}
+ # neither 'fluximage' nor 'merge_all' is available
+ printf "ERROR: neither 'fluximage' nor 'merge_all' is available\n"
+ exit ${ERR_TOOL}
fi
## main }}}
diff --git a/scripts/ciao_expcorr_sbp.sh b/scripts/ciao_expcorr_sbp.sh
index 022bfcf..5416085 100755
--- a/scripts/ciao_expcorr_sbp.sh
+++ b/scripts/ciao_expcorr_sbp.sh
@@ -3,7 +3,14 @@
## Make exposure map and exposure-corrected image (revoke 'ciao_expcorr.sh'),
## and extract surface brightness profile (revoke 'ciao_sbp.sh').
##
-## ChangeLogs:
+## Author: Weitian LI
+##
+VERSION="v4.3"
+UPDATED="2015/06/03"
+##
+## Changelogs:
+## v4.3, 2015/06/03, Aaron LI
+## * Replaced 'ls' with '\ls'
## v4.2, 2015/03/05, Weitian LI
## * Added exit code check for the 'EXPCORR_SCRIPT' and 'EXTRACT_SBP_SCRIPT'
## * Removed the code of 'spc_fit.cfg' generation
@@ -11,14 +18,12 @@
## v4.1, 2014/10/30, Weitian LI
## * updated 'EXPCORR_SCRIPT' & 'EXTRACT_SBP_SCRIPT',
## * removed version number in scripts filename.
-## v4, 2013/10/12, LIweitiaNux
+## v4, 2013/10/12, Weitian LI
## * split out the 'generate regions' parts -> 'ciao_genreg_v1.sh'
-## v3, 2013/05/03, LIweitiaNux
+## v3, 2013/05/03, Weitian LI
## * add parameter 'ds9' to check the centroid and regions
##
-UPDATED="2015/03/05"
-
unalias -a
export LC_COLLATE=C
@@ -49,7 +54,8 @@ case "$1" in
-[hH]*|--[hH]*)
printf "usage:\n"
printf " `basename $0` evt=<evt2_clean> sbp_reg=<sbprofile.reg> nh=<nh> z=<redshift> temp=<avg_temp> abund=<avg_abund> cellreg=<celld_reg> expcorr=<yes|no>\n"
- printf "\nVersion: ${UPDATED}\n"
+ printf "\nversion:\n"
+ printf " ${VERSION}, ${UPDATED}\n"
exit ${ERR_USG}
;;
esac
@@ -57,7 +63,7 @@ esac
## default parameters {{{
## clean evt2 file
-DFT_EVT=`ls evt2*_clean.fits 2> /dev/null`
+DFT_EVT=`\ls evt2*_clean.fits 2> /dev/null`
## the repro dir
DFT_BASEDIR=".."
@@ -68,10 +74,10 @@ DFT_SBP_REG="sbprofile.reg"
DFT_LOGFILE="expcorr_sbp_`date '+%Y%m%d'`.log"
## cell region
-DFT_CELL_REG=`ls celld*.reg 2> /dev/null`
+DFT_CELL_REG=`\ls celld*.reg 2> /dev/null`
## background spectra
-DFT_BKGD=`ls bkgcorr_bl*.pi 2> /dev/null`
+DFT_BKGD=`\ls bkgcorr_bl*.pi 2> /dev/null`
## default parameters }}}
## functions {{{
@@ -215,8 +221,8 @@ else
printf "======== EXPOSURE CORRECTION FINISHED =======\n\n"
fi
-EXPMAP=`ls expmap*e700-7000*fits 2> /dev/null`
-EVT_E=`ls evt*e700-7000*fits 2> /dev/null`
+EXPMAP=`\ls expmap*e700-7000*fits 2> /dev/null`
+EVT_E=`\ls evt*e700-7000*fits 2> /dev/null`
printf "======== EXTRACT SBP =======\n"
CMD="${SCRIPT_DIR}/${EXTRACT_SBP_SCRIPT} evt_e=${EVT_E} reg=${SBP_REG} expmap=${EXPMAP} cellreg=${CELL_REG}"
diff --git a/scripts/ciao_genregs.sh b/scripts/ciao_genregs.sh
index 643acba..1f6c58a 100755
--- a/scripts/ciao_genregs.sh
+++ b/scripts/ciao_genregs.sh
@@ -1,15 +1,27 @@
#!/bin/sh
-unalias -a
-export LC_COLLATE=C
-
-#####################################################################
-## generate 'radius spectra profile' regions
-## and 'radius surface profile' regions
##
-## ChangeLogs:
-## v1, 2013/10/12, LIweitiaNux
+## Generate regions for 'radial spectra analysis' (rspec.reg),
+## and regions for 'surface brightness profile' extraction.
+##
+## Author: Weitian LI
+## Created: 2013/10/12
+##
+VERSION="v2.0"
+UPDATED="2015/06/03"
+##
+## Changelogs:
+## v2.0, 2015/06/03, Aaron LI
+## * Updated script description
+## * Replaced 'grep' with '\grep', 'ls' with '\ls'
+## * ds9 colormap changed from 'sls' to 'he'
+## * Copy needed pfiles to current working directory, and
+## set environment variable $PFILES to use these first.
+## v1.0, 2013/10/12, Weitian LI
## split from 'ciao_expcorr_sbp_v3.sh'
-#####################################################################
+##
+
+unalias -a
+export LC_COLLATE=C
SCRIPT_PATH=`readlink -f $0`
SCRIPT_DIR=`dirname ${SCRIPT_PATH}`
@@ -17,11 +29,6 @@ XCENTROID_SCRIPT="chandra_xcentroid.sh"
GEN_SPCREG_SCRIPT="chandra_genspcreg.sh"
GEN_SBPREG_SCRIPT="chandra_gensbpreg.sh"
-## about, used in `usage' {{{
-VERSION="v1"
-UPDATE="2013-10-12"
-## about }}}
-
## err code {{{
ERR_USG=1
ERR_DIR=11
@@ -45,22 +52,22 @@ case "$1" in
printf "usage:\n"
printf " `basename $0` evt=<evt2_clean> reg_in=<reg_in> bkgd=<bkgd_spec> ds9=<Yes|no>\n"
printf "\nversion:\n"
- printf "${VERSION}, ${UPDATE}\n"
+ printf " ${VERSION}, ${UPDATED}\n"
exit ${ERR_USG}
;;
esac
## usage }}}
## link needed files {{{
-BKGD_FILE=`ls ../bkg/bkgcorr_bl*.pi 2> /dev/null | head -n 1`
+BKGD_FILE=`\ls ../bkg/bkgcorr_bl*.pi 2> /dev/null | head -n 1`
if [ -r "${BKGD_FILE}" ]; then
ln -svf ${BKGD_FILE} .
fi
-ASOL_FILE=`ls ../pcad*_asol?.fits 2> /dev/null`
+ASOL_FILE=`\ls ../pcad*_asol?.fits 2> /dev/null`
if [ -r "${ASOL_FILE}" ]; then
ln -svf ${ASOL_FILE} .
fi
-CELL_REG_FILE=`ls ../evt/celld*.reg 2> /dev/null | grep -v 'orig'`
+CELL_REG_FILE=`\ls ../evt/celld*.reg 2> /dev/null | \grep -v 'orig'`
if [ -r "${CELL_REG_FILE}" ]; then
ln -svf ${CELL_REG_FILE} .
fi
@@ -68,11 +75,11 @@ fi
## default parameters {{{
## clean evt2 file
-DFT_EVT=`ls evt2*_clean.fits 2> /dev/null`
+DFT_EVT=`\ls evt2*_clean.fits 2> /dev/null`
## the repro dir
DFT_BASEDIR=".."
# default `asol file'
-ASOL="`ls pcadf*_asol1.fits 2> /dev/null | head -n 1`"
+ASOL="`\ls pcadf*_asol1.fits 2> /dev/null | head -n 1`"
## energy range
# format: `E_START:E_END:E_WIDTH'
@@ -84,7 +91,7 @@ E_END=`echo ${DFT_ENERGY} | awk -F':' '{ print $2 }'`
DFT_LOGFILE="genreg_`date '+%Y%m%d'`.log"
## background spectra
-DFT_BKGD=`ls bkgcorr_bl*.pi | head -n 1`
+DFT_BKGD=`\ls bkgcorr_bl*.pi | head -n 1`
## default parameters }}}
## functions {{{
@@ -188,18 +195,31 @@ else
fi
## parameters }}}
+## prepare parameter files (pfiles) {{{
+CIAO_TOOLS="dmkeypar dmcopy dmcoords"
+
+# Copy necessary pfiles for localized usage
+for tool in ${CIAO_TOOLS}; do
+ pfile=`paccess ${tool}`
+ [ -n "${pfile}" ] && punlearn ${tool} && cp -Lvf ${pfile} .
+done
+
+# Modify environment variable 'PFILES' to use local pfiles first
+export PFILES="./:${PFILES}"
+## pfiles }}}
+
## determine ACIS type {{{
# consistent with `ciao_procevt'
punlearn dmkeypar
DETNAM=`dmkeypar ${EVT} DETNAM echo=yes`
-if echo ${DETNAM} | grep -q 'ACIS-0123'; then
+if echo ${DETNAM} | \grep -q 'ACIS-0123'; then
printf "## \`DETNAM' (${DETNAM}) has chips 0123\n"
printf "## ACIS-I\n"
ACIS_TYPE="ACIS-I"
CCD="0:3"
NEW_DETNAM="ACIS-0123"
ROOTNAME="c0-3_e${E_START}-${E_END}"
-elif echo ${DETNAM} | grep -q 'ACIS-[0-6]*7'; then
+elif echo ${DETNAM} | \grep -q 'ACIS-[0-6]*7'; then
printf "## \`DETNAM' (${DETNAM}) has chip 7\n"
printf "## ACIS-S\n"
ACIS_TYPE="ACIS-S"
@@ -224,19 +244,19 @@ printf "======== X-RAY CENTROID =======\n"
CMD="${SCRIPT_DIR}/${XCENTROID_SCRIPT} evt=${EVT} reg=${REG_IN} conv=yes 2>&1 | tee xcentroid.dat"
printf "CMD: $CMD\n"
${SCRIPT_DIR}/${XCENTROID_SCRIPT} evt=${EVT} reg=${REG_IN} conv=yes 2>&1 | tee xcentroid.dat
-X=`grep '(X,Y)' xcentroid.dat | tr -d ' XY():' | awk -F',' '{ print $2 }'`
-Y=`grep '(X,Y)' xcentroid.dat | tr -d ' XY():' | awk -F',' '{ print $3 }'`
+X=`\grep '(X,Y)' xcentroid.dat | tr -d ' XY():' | awk -F',' '{ print $2 }'`
+Y=`\grep '(X,Y)' xcentroid.dat | tr -d ' XY():' | awk -F',' '{ print $3 }'`
CNTRD_WCS_REG="centroid_wcs.reg"
CNTRD_PHY_REG="centroid_phy.reg"
printf "## X centroid: ($X,$Y)\n"
if [ "${F_DS9}" = "YES" ]; then
printf "check the X centroid ...\n"
- ds9 ${EVT_E} -regions ${CNTRD_PHY_REG} -cmap sls -bin factor 4
+ ds9 ${EVT_E} -regions ${CNTRD_PHY_REG} -cmap he -bin factor 4
fi
X0=$X
Y0=$Y
-X=`grep -i 'point' ${CNTRD_PHY_REG} | head -n 1 | tr -d 'a-zA-Z() ' | awk -F',' '{ print $1 }'`
-Y=`grep -i 'point' ${CNTRD_PHY_REG} | head -n 1 | tr -d 'a-zA-Z() ' | awk -F',' '{ print $2 }'`
+X=`\grep -i 'point' ${CNTRD_PHY_REG} | head -n 1 | tr -d 'a-zA-Z() ' | awk -F',' '{ print $1 }'`
+Y=`\grep -i 'point' ${CNTRD_PHY_REG} | head -n 1 | tr -d 'a-zA-Z() ' | awk -F',' '{ print $2 }'`
if [ "x${X}" != "x${X0}" ] || [ "x${Y}" != "x${Y0}" ]; then
printf "## X CENTROID CHANGED -> ($X,$Y)\n"
# update ${CNTRD_WCS_REG}
@@ -269,7 +289,7 @@ printf "CMD: ${CMD}\n"
${SCRIPT_DIR}/${GEN_SPCREG_SCRIPT} ${EVT} ${EVT_E} ${BKGD} ${X} ${Y} ${SPC_REG}
if [ "${F_DS9}" = "YES" ]; then
printf "check SPC regions ...\n"
- ds9 ${EVT_E} -regions ${SPC_REG} -cmap sls -bin factor 4
+ ds9 ${EVT_E} -regions ${SPC_REG} -cmap he -bin factor 4
fi
printf "======== GENERATE SPECTRUM REGIONS FINISHED =======\n\n"
diff --git a/scripts/ciao_procevt.sh b/scripts/ciao_procevt.sh
index d73bfaf..90162bb 100755
--- a/scripts/ciao_procevt.sh
+++ b/scripts/ciao_procevt.sh
@@ -1,5 +1,7 @@
#!/bin/sh
#
+unalias -a
+export LC_COLLATE=C
###########################################################
## process `evt2 file' generated by `chandra_repro' ##
## to produce a `clean evt2 file' ##
@@ -13,23 +15,26 @@
## if `DETNAM' has `0123', then `ACIS-I' ##
## if `DETNAM' has `7', then `ACIS-S' ##
## ##
-## LIweitiaNux <liweitianux@gmail.com> ##
-## August 16, 2012 ##
-###########################################################
-
+## Weitian LI <liweitianux@gmail.com> ##
+## 2012/08/16 ##
###########################################################
+##
+VERSION="v3.0"
+UPDATED="2015/06/02"
+##
## ChangeLogs:
+## v3.0, 2015/06/02, Aaron LI
+## * Added 'unalias -a' and 'export LC_COLLATE=C'
+## * Replaced 'grep' with '\grep', 'ls' with '\ls'
+## * Copy needed pfiles to current working directory, and
+## set environment variable $PFILES to use these first.
## v2.2, 2014/10/30, Weitian LI
-## small fix to the generation of chips script:
-## changed '>>' to '>'
-## v2.1, 2012/08/16, LIweitiaNux
+## small fix to the generation of chips script:
+## changed '>>' to '>'
+## v2.1, 2012/08/16, Weitian LI
## improve invoke `chips', run it in a separate terminal
-###########################################################
+##
-## about, used in `usage' {{{
-VERSION="v2"
-UPDATE="2012-08-16"
-## about }}}
## error code {{{
ERR_USG=1
@@ -52,7 +57,7 @@ case "$1" in
printf "usage:\n"
printf " `basename $0` evt=<raw_evt_file>\n"
printf "\nversion:\n"
- printf "${VERSION}, ${UPDATE}\n"
+ printf " ${VERSION}, ${UPDATED}\n"
exit ${ERR_USG}
;;
esac
@@ -61,7 +66,7 @@ esac
## default parameters {{{
# default `event file' which used to match `blanksky' files
#DFT_EVT="_NOT_EXIST_"
-DFT_EVT="`ls acisf?????*_repro_evt2.fits 2> /dev/null`"
+DFT_EVT="`\ls acisf?????*_repro_evt2.fits 2> /dev/null`"
## default parameters }}}
## functions {{{
@@ -100,16 +105,29 @@ fi
printf "## use evt file: \`${EVT}'\n"
## parameters }}}
+## prepare parameter files (pfiles) {{{
+CIAO_TOOLS="dmkeypar dmcopy celldetect dmextract"
+
+# Copy necessary pfiles for localized usage
+for tool in ${CIAO_TOOLS}; do
+ pfile=`paccess ${tool}`
+ [ -n "${pfile}" ] && punlearn ${tool} && cp -Lvf ${pfile} .
+done
+
+# Modify environment variable 'PFILES' to use local pfiles first
+export PFILES="./:${PFILES}"
+## pfiles }}}
+
## determine ACIS type {{{
punlearn dmkeypar
DETNAM=`dmkeypar ${EVT} DETNAM echo=yes`
-if echo ${DETNAM} | grep -q 'ACIS-0123'; then
+if echo ${DETNAM} | \grep -q 'ACIS-0123'; then
printf "## \`DETNAM' (${DETNAM}) has chips 0123\n"
printf "## ACIS-I\n"
ACIS_TYPE="ACIS-I"
CCD="0:3"
ROOTNAME="evt2_c`echo ${CCD} | tr ':' '-'`"
-elif echo ${DETNAM} | grep -q 'ACIS-[0-6]*7'; then
+elif echo ${DETNAM} | \grep -q 'ACIS-[0-6]*7'; then
printf "## \`DETNAM' (${DETNAM}) has chip 7\n"
printf "## ACIS-S\n"
ACIS_TYPE="ACIS-S"
diff --git a/scripts/ciao_r500avgt.sh b/scripts/ciao_r500avgt.sh
index 3318bb4..feed110 100755
--- a/scripts/ciao_r500avgt.sh
+++ b/scripts/ciao_r500avgt.sh
@@ -15,11 +15,10 @@ export LC_COLLATE=C
## 3) ARF/RMF files either provided or use the ARF/RMF ##
## of the outmost region ##
## ##
-## LIweitiaNux <liweitianux@gmail.com> ##
-## August 22, 2012 ##
-###########################################################
-
+## Weitian LI <liweitianux@gmail.com> ##
+## 2012/08/22 ##
###########################################################
+##
## ChangeLogs
## v1.1, 2012/08/26, LIweitiaNux
## modify `KPC_PER_PIX', able to use the newest version `calc_distance'
@@ -32,15 +31,20 @@ export LC_COLLATE=C
## change `DFT_GRP_CMD' to `group 1 128 4 ...'
## v3.0, 2013/02/09, LIweitiaNux
## modify for new process
-## v3.1, 2015/05/27, Weitian LI
+## v3.1, 2015/05/27, Aaron LI
## update 'DFT_ARF' & 'DFT_RMF' to find '*.arf' & '*.rmf' files
## (specextract only use .arf & .rmf extensions since revision 2014-12)
-###########################################################
-
-## about, used in `usage' {{{
-VERSION="v3.1"
-UPDATE="2015-05-27"
-## about }}}
+## v3.2, 2015/05/30, Aaron LI
+## Added options '-cmap he -bin factor 4' to ds9 command
+## v4.0, 2015/06/03, Aaron LI
+## * Copy needed pfiles to current working directory, and
+## set environment variable $PFILES to use these first.
+## * Replace 'grep' with '\grep', 'ls' with '\ls'
+## * replaced 'grppha' with 'dmgroup' to group spectra
+## (dmgroup adds history to the FITS file, while grppha does not)
+##
+VERSION="v4.1"
+UPDATED="2015/06/03"
## error code {{{
ERR_USG=1
@@ -64,9 +68,9 @@ ERR_UNI=61
case "$1" in
-[hH]*|--[hH]*)
printf "usage:\n"
- printf " `basename $0` evt=<evt2_clean> r500=<r500_kpc> basedir=<basedir> info=<info_json> inner=<inner_val> outer=<outer_val> regin=<input_reg> regout=<output_reg> bkgd=<blank_evt|lbkg_reg|bkg_spec> nh=<nH> z=<redshift> arf=<arf_file> rmf=<rmf_file> [ grpcmd=<grppha_cmd> log=<log_file> ]\n"
+ printf " `basename $0` evt=<evt2_clean> r500=<r500_kpc> basedir=<basedir> info=<info_json> inner=<inner_val> outer=<outer_val> regin=<input_reg> regout=<output_reg> bkgd=<blank_evt|lbkg_reg|bkg_spec> nh=<nH> z=<redshift> arf=<arf_file> rmf=<rmf_file> [ grouptype=<NUM_CTS|BIN> grouptypeval=<number> binspec=<binspec> log=<log_file> ]\n"
printf "\nversion:\n"
- printf "${VERSION}, ${UPDATE}\n"
+ printf " ${VERSION}, ${UPDATED}\n"
exit ${ERR_USG}
;;
esac
@@ -88,9 +92,9 @@ fi
## default parameters {{{
# default `event file' which used to match `blanksky' files
#DFT_EVT="_NOT_EXIST_"
-DFT_EVT="`ls evt2*_clean.fits 2> /dev/null`"
+DFT_EVT="`\ls evt2*_clean.fits 2> /dev/null`"
# default `bkgd', use `bkgcorr_blanksky*' corrected bkg spectrum
-DFT_BKGD="`ls bkgcorr_blanksky_*.pi 2> /dev/null`"
+DFT_BKGD="`\ls bkgcorr_blanksky_*.pi 2> /dev/null`"
# default basedir
DFT_BASEDIR="../.."
# default `radial region file'
@@ -100,18 +104,20 @@ DFT_REG_IN="rspec.reg"
DFT_INNER="0.1"
DFT_OUTER="0.5"
# default ARF/RMF, the one of the outmost region
-DFT_ARF="`ls -1 r?_*.warf r?_*.arf 2> /dev/null | tail -n 1`"
-DFT_RMF="`ls -1 r?_*.wrmf r?_*.rmf 2> /dev/null | tail -n 1`"
+DFT_ARF="`\ls -1 r?_*.warf r?_*.arf 2> /dev/null | tail -n 1`"
+DFT_RMF="`\ls -1 r?_*.wrmf r?_*.rmf 2> /dev/null | tail -n 1`"
-# default `group command' for `grppha'
-DFT_GRP_CMD="group min 25"
-#DFT_GRP_CMD="group 1 128 2 129 256 4 257 512 8 513 1024 16"
-#DFT_GRP_CMD="group 1 128 4 129 256 8 257 512 16 513 1024 32"
-# default `log file'
-DFT_LOGFILE="r500avgt_`date '+%Y%m%d%H'`.log"
+# default parameters for 'dmgroup'
+DFT_GROUPTYPE="NUM_CTS"
+DFT_GROUPTYPEVAL="25"
+#DFT_GROUPTYPE="BIN"
+DFT_BINSPEC="1:128:2,129:256:4,257:512:8,513:1024:16"
# default JSON pattern
DFT_JSON_PAT="*_INFO.json"
+
+# default `log file'
+DFT_LOGFILE="r500avgt_`date '+%Y%m%d%H'`.log"
## default parameters }}}
## functions {{{
@@ -136,7 +142,7 @@ CH_LOW=651
CH_HI=822
pb_flux() {
punlearn dmstat
- COUNTS=`dmstat "$1[channel=${CH_LOW}:${CH_HI}][cols COUNTS]" | grep -i 'sum:' | awk '{ print $2 }'`
+ COUNTS=`dmstat "$1[channel=${CH_LOW}:${CH_HI}][cols COUNTS]" | \grep -i 'sum:' | awk '{ print $2 }'`
punlearn dmkeypar
EXPTIME=`dmkeypar $1 EXPOSURE echo=yes`
BACK=`dmkeypar $1 BACKSCAL echo=yes`
@@ -151,6 +157,7 @@ bkg_renorm() {
# $1: src spectrum, $2: back spectrum
PBFLUX_SRC=`pb_flux $1`
PBFLUX_BKG=`pb_flux $2`
+ punlearn dmkeypar
BACK_OLD=`dmkeypar $2 BACKSCAL echo=yes`
BACK_OLD_B=`echo "( ${BACK_OLD} )" | sed 's/[eE]/\*10\^/' | sed 's/+//'`
BACK_NEW=`echo "scale = 16; ${BACK_OLD_B} * ${PBFLUX_BKG} / ${PBFLUX_SRC}" | bc -l`
@@ -189,7 +196,7 @@ fi
if [ ! -z "${json}" ] && [ -r "${BASEDIR}/${json}" ]; then
JSON_FILE="${BASEDIR}/${json}"
elif ls ${BASEDIR}/${DFT_JSON_PAT} > /dev/null 2>&1; then
- JSON_FILE=`ls ${BASEDIR}/${DFT_JSON_PAT}`
+ JSON_FILE=`\ls ${BASEDIR}/${DFT_JSON_PAT}`
else
read -p "> JSON_file: " JSON_FILE
if [ ! -r "${JSON_FILE}" ]; then
@@ -200,8 +207,8 @@ fi
printf "## use json_file: \`${JSON_FILE}'\n" | ${TOLOG}
# process `nh' and `redshift' {{{
-NH_JSON=`grep '"nH' ${JSON_FILE} | sed 's/.*"nH.*":\ //' | sed 's/\ *,$//'`
-Z_JSON=`grep '"redshift' ${JSON_FILE} | sed 's/.*"redshift.*":\ //' | sed 's/\ *,$//'`
+NH_JSON=`\grep '"nH' ${JSON_FILE} | sed 's/.*"nH.*":\ //' | sed 's/\ *,$//'`
+Z_JSON=`\grep '"redshift' ${JSON_FILE} | sed 's/.*"redshift.*":\ //' | sed 's/\ *,$//'`
printf "## get nh: \`${NH_JSON}' (from \`${JSON_FILE}')\n" | ${TOLOG}
printf "## get redshift: \`${Z_JSON}' (from \`${JSON_FILE}')\n" | ${TOLOG}
## if `nh' and `redshift' supplied in cmdline, then use them
@@ -235,7 +242,7 @@ printf "## region range: (${INNER} - ${OUTER} R500)\n" | ${TOLOG}
# range }}}
# process `r500' {{{
-R500_RAW=`grep '"R500.*kpc' ${JSON_FILE} | sed 's/.*"R500.*":\ //' | sed 's/\ *,$//'`
+R500_RAW=`\grep '"R500.*kpc' ${JSON_FILE} | sed 's/.*"R500.*":\ //' | sed 's/\ *,$//'`
if [ ! -z "${r500}" ]; then
R500_RAW=${r500}
fi
@@ -255,7 +262,7 @@ case "${R500_UNI}" in
;;
*)
printf "## units in \`kpc', convert to \`Chandra pixel'\n" | ${TOLOG}
- KPC_PER_PIX=`${COSCALC} ${REDSHIFT} | grep 'kpc.*pix' | tr -d 'a-zA-Z_#=(),:/ '`
+ KPC_PER_PIX=`${COSCALC} ${REDSHIFT} | \grep 'kpc.*pix' | tr -d 'a-zA-Z_#=(),:/ '`
# convert scientific notation for `bc'
KPC_PER_PIX_B=`echo ${KPC_PER_PIX} | sed 's/[eE]/\*10\^/' | sed 's/+//'`
printf "## calculated \`kpc/pixel': ${KPC_PER_PIX_B}\n"
@@ -307,7 +314,7 @@ printf "## set output regfile: \`${REG_OUT}'\n" | ${TOLOG}
# get center position from `regin'
# only consider `pie' or `annulus'-shaped region
-TMP_REG=`grep -iE '(pie|annulus)' ${REG_IN} | head -n 1`
+TMP_REG=`\grep -iE '(pie|annulus)' ${REG_IN} | head -n 1`
XC=`echo ${TMP_REG} | tr -d 'a-zA-Z() ' | awk -F',' '{ print $1 }'`
YC=`echo ${TMP_REG} | tr -d 'a-zA-Z() ' | awk -F',' '{ print $2 }'`
printf "## get center coord: (${XC},${YC})\n" | ${TOLOG}
@@ -329,7 +336,7 @@ printf "## use bkgd: \`${BKGD}'\n" | ${TOLOG}
# determine bkg type: blanksky, lbkg_reg, bkg_spec ?
# according to file type first: text / FITS
# if FITS, then get values of `HDUCLAS1' and `OBJECT'
-if file -bL ${BKGD} | grep -qi 'text'; then
+if file -bL ${BKGD} | \grep -qi 'text'; then
printf "## given \`${BKGD}' is a \`text file'\n"
printf "## use it as local bkg region file\n"
printf "## use *LOCAL BKG SPEC*\n" | ${TOLOG}
@@ -337,9 +344,10 @@ if file -bL ${BKGD} | grep -qi 'text'; then
USE_LBKG_REG=YES
USE_BLANKSKY=NO
USE_BKG_SPEC=NO
-elif file -bL ${BKGD} | grep -qi 'FITS'; then
+elif file -bL ${BKGD} | \grep -qi 'FITS'; then
printf "## given \`${BKGD}' is a \`FITS file'\n"
# get FITS header keyword
+ punlearn dmkeypar
HDUCLAS1=`dmkeypar ${BKGD} HDUCLAS1 echo=yes`
if [ "${HDUCLAS1}" = "EVENTS" ]; then
# event file
@@ -408,17 +416,45 @@ fi
printf "## use RMF: \`${RMF}'\n" | ${TOLOG}
# arf & rmf }}}
-# check given `grpcmd'
-if [ ! -z "${grpcmd}" ]; then
- GRP_CMD="${grpcmd}"
+# check given dmgroup parameters: grouptype, grouptypeval, binspec
+if [ -z "${grouptype}" ]; then
+ GROUPTYPE="${DFT_GROUPTYPE}"
+elif [ "x${grouptype}" = "xNUM_CTS" ] || [ "x${grouptype}" = "xBIN" ]; then
+ GROUPTYPE="${grouptype}"
+else
+ printf "ERROR: given grouptype \`${grouptype}' invalid.\n"
+ exit ${ERR_GRPTYPE}
+fi
+printf "## use grouptype: \`${GROUPTYPE}'\n" | ${TOLOG}
+if [ ! -z "${grouptypeval}" ]; then
+ GROUPTYPEVAL="${grouptypeval}"
+else
+ GROUPTYPEVAL="${DFT_GROUPTYPEVAL}"
+fi
+printf "## use grouptypeval: \`${GROUPTYPEVAL}'\n" | ${TOLOG}
+if [ ! -z "${binspec}" ]; then
+ BINSPEC="${binspec}"
else
- GRP_CMD="${DFT_GRP_CMD}"
+ BINSPEC="${DFT_BINSPEC}"
fi
-printf "## use grppha cmd: \`${GRP_CMD}'\n" | ${TOLOG}
+printf "## use binspec: \`${BINSPEC}'\n" | ${TOLOG}
## parameters }}}
-##################################################
-#### main
+## prepare parameter files (pfiles) {{{
+CIAO_TOOLS="dmstat dmkeypar dmhedit dmextract dmgroup"
+
+# Copy necessary pfiles for localized usage
+for tool in ${CIAO_TOOLS}; do
+ pfile=`paccess ${tool}`
+ [ -n "${pfile}" ] && punlearn ${tool} && cp -Lvf ${pfile} .
+done
+
+# Modify environment variable 'PFILES' to use local pfiles first
+export PFILES="./:${PFILES}"
+## pfiles }}}
+
+### main ###
+
## region related {{{
## generate the needed region file
printf "generate the output region file ...\n"
@@ -430,11 +466,11 @@ _EOF_
## open the evt file to verify or modify
printf "## check the generated pie region ...\n"
printf "## if modified, save with the same name \`${REG_OUT}' (overwrite)\n"
-ds9 ${EVT} -region ${REG_OUT}
+ds9 ${EVT} -region ${REG_OUT} -cmap he -bin factor 4
## check the (modified) region (pie region end angle)
printf "check the above region (for pie region end angle) ...\n"
-INVALID=`grep -i 'pie' ${REG_OUT} | awk -F'[,()]' '$7 > 360'`
+INVALID=`\grep -i 'pie' ${REG_OUT} | awk -F'[,()]' '$7 > 360'`
if [ "x${INVALID}" != "x" ]; then
printf "*** WARNING: there are pie regions' END_ANGLE > 360\n" | ${TOLOG}
printf "*** will to fix ...\n"
@@ -462,8 +498,11 @@ dmextract infile="${EVT}[sky=region(${REG_OUT})][bin PI]" \
outfile="${AVGT_SPEC}" wmap="[bin det=8]" clobber=yes
# group spectrum
printf "group object spectrum ...\n"
-grppha infile="${AVGT_SPEC}" outfile="${AVGT_SPEC_GRP}" \
- comm="${GRP_CMD} & exit" clobber=yes > /dev/null
+punlearn dmgroup
+dmgroup infile="${AVGT_SPEC}" outfile="${AVGT_SPEC_GRP}" \
+ grouptype="${GROUPTYPE}" grouptypeval=${GROUPTYPEVAL} \
+ binspec="${BINSPEC}" xcolumn="CHANNEL" ycolumn="COUNTS" \
+ clobber=yes
# background
printf "generate the background spectrum ...\n"
diff --git a/scripts/ciao_sbp.sh b/scripts/ciao_sbp.sh
index ef636ca..0e7acd5 100755
--- a/scripts/ciao_sbp.sh
+++ b/scripts/ciao_sbp.sh
@@ -1,25 +1,31 @@
#!/bin/sh
##
-## Extract `surface brighness profile'
+## Extract `surface brighness profile' after exposure correction finished.
##
## NOTES:
## * Only ACIS-I (chip: 0-3) and ACIS-S (chip: 7) supported
##
## Weitian LI <liweitianux@gmail.com>
## Created: 2012/08/16
-UPDATED="2015/03/29"
+##
+VERSION="v4.0"
+UPDATED="2015/06/03"
##
## ChangeLogs:
-## 2015/03/29, Weitian LI
+## v4.0, 2015/06/03, Aaron LI
+## * Copy needed pfiles to current working directory, and
+## set environment variable $PFILES to use these first.
+## * Replaced 'grep' with '\grep', 'ls' with '\ls'
+## v3.3, 2015/03/29, Weitian LI
## * Skip skyfov generation if it already exists.
## * Rename parameter 'aspec' to 'aspect' to match with 'skyfov'
-## 2015/03/06, Weitian LI
+## v3.2, 2015/03/06, Weitian LI
## * Updated this document of the script.
## * Added 'SKIP SINGLE' to the generated QDP of SBP file.
-## v3.1: 2013/02/01, Zhenghao ZHU
-## removes the region in ccd gap of ACIS_I
-## removes the region in the area of point source
-## need asol file to prevent offset
+## v3.1, 2013/02/01, Zhenghao ZHU
+## * removes the region in ccd gap of ACIS_I
+## * removes the region in the area of point source
+## * provide asol file to correct offset
##
unalias -a
@@ -52,8 +58,8 @@ case "$1" in
-[hH]*|--[hH]*)
printf "usage:\n"
printf " `basename $0` evt_e=<evt_e_file> reg=<sbp_reg> expmap=<exp_map> cellreg=<cell_reg> aspect=<asol_file> [bkg=<bkg> log=<logfile> ]\n"
- printf "\nupdated:\n"
- printf "${UPDATED}\n"
+ printf "\nversion:\n"
+ printf " ${VERSION}, ${UPDATED}\n"
exit ${ERR_USG}
;;
esac
@@ -66,16 +72,16 @@ E_RANGE="700:7000"
DFT_CCD_EDGECUT=25
# default `event file' which used to match `blanksky' files
#DFT_EVT="_NOT_EXIST_"
-DFT_EVT_E="`ls evt2_*_e*.fits 2> /dev/null`"
+DFT_EVT_E="`\ls evt2_*_e*.fits 2> /dev/null`"
# default expmap
-DFT_EXPMAP="`ls expmap*.fits 2> /dev/null | head -n 1`"
+DFT_EXPMAP="`\ls expmap*.fits 2> /dev/null | head -n 1`"
# default `radial region file' to extract surface brightness
#DFT_SBP_REG="_NOT_EXIST_"
DFT_SBP_REG="sbprofile.reg"
# defalut pointsource region file
-DFT_CELL_REG="`ls celld*.reg 2> /dev/null`"
+DFT_CELL_REG="`\ls celld*.reg 2> /dev/null`"
# defalut asol file
-DFT_ASOL_FILE="`ls -1 pcad*asol*fits 2> /dev/null`"
+DFT_ASOL_FILE="`\ls -1 pcad*asol*fits 2> /dev/null`"
# default `log file'
DFT_LOGFILE="sbp_`date '+%Y%m%d'`.log"
@@ -206,15 +212,28 @@ fi
printf "## use asol file(s) : \`${ASOL_FILE}'\n" | ${TOLOG}
## parameters }}}
+## prepare parameter files (pfiles) {{{
+CIAO_TOOLS="dmkeypar dmlist dmextract dmtcalc skyfov"
+
+# Copy necessary pfiles for localized usage
+for tool in ${CIAO_TOOLS}; do
+ pfile=`paccess ${tool}`
+ [ -n "${pfile}" ] && punlearn ${tool} && cp -Lvf ${pfile} .
+done
+
+# Modify environment variable 'PFILES' to use local pfiles first
+export PFILES="./:${PFILES}"
+## pfiles }}}
+
## determine ACIS type {{{
# consistent with `ciao_procevt'
punlearn dmkeypar
DETNAM=`dmkeypar ${EVT_E} DETNAM echo=yes`
-if echo ${DETNAM} | grep -q 'ACIS-0123'; then
+if echo ${DETNAM} | \grep -q 'ACIS-0123'; then
printf "## \`DETNAM' (${DETNAM}) has chips 0123 -> ACIS-I\n"
ACIS_TYPE="I"
CCD="0:3"
-elif echo ${DETNAM} | grep -q 'ACIS-[0-6]*7'; then
+elif echo ${DETNAM} | \grep -q 'ACIS-[0-6]*7'; then
printf "## \`DETNAM' (${DETNAM}) has chip 7 -> ACIS-S\n"
ACIS_TYPE="S"
CCD="7"
@@ -225,7 +244,7 @@ fi
## ACIS type }}}
## check validity of pie region {{{
-INVALID=`grep -i 'pie' ${SBP_REG} | awk -F'[,()]' '$7 > 360'`
+INVALID=`\grep -i 'pie' ${SBP_REG} | awk -F'[,()]' '$7 > 360'`
SBP_REG_FIX="_${SBP_REG%.reg}_fix.reg"
if [ "x${INVALID}" != "x" ]; then
printf "*** WARNING: some pie regions' END_ANGLE > 360\n" | ${TOLOG}
@@ -436,12 +455,12 @@ echo "${REG_CCD_RAW}" >_ccd_raw.reg
SBP_REG_INCCD="_${SBP_REG%.reg}_inccd.reg"
[ -e "${SBP_REG_INCCD}" ] && mv -fv ${SBP_REG_INCCD} ${SBP_REG_INCCD}_bak
-echo "CMD: cat ${CELL_REG} | grep \( | sed -e ':a;N;s/\n/-/;ta'"
-CELL_REG_USE=`cat ${CELL_REG} | grep \( | sed -e ':a;N;s/\n/-/;ta'`
+echo "CMD: cat ${CELL_REG} | \grep \( | sed -e ':a;N;s/\n/-/;ta'"
+CELL_REG_USE=`cat ${CELL_REG} | \grep \( | sed -e ':a;N;s/\n/-/;ta'`
# exit 233
if [ "${ACIS_TYPE}" = "S" ]; then
- grep -iE '^(pie|annulus)' ${SBP_REG_FIX} | sed "s/$/\ \&\ `cat ${REG_FILE_CCD}`/" | sed "s/$/\ \-\ ${CELL_REG_USE}/" > ${SBP_REG_INCCD}
+ \grep -iE '^(pie|annulus)' ${SBP_REG_FIX} | sed "s/$/\ \&\ `cat ${REG_FILE_CCD}`/" | sed "s/$/\ \-\ ${CELL_REG_USE}/" > ${SBP_REG_INCCD}
else
L=`cat ${SBP_REG_FIX} | wc -l `
@@ -457,6 +476,7 @@ printf "extract surface brightness profile ...\n"
SBP_DAT="${SBP_REG%.reg}.fits"
[ -e "${SBP_DAT}" ] && mv -fv ${SBP_DAT} ${SBP_DAT}_bak
if [ -r "${BKG}" ]; then
+ punlearn dmkeypar
EXPO_EVT=`dmkeypar ${EVT_E} EXPOSURE echo=yes`
EXPO_BKG=`dmkeypar ${BKG} EXPOSURE echo=yes`
BKG_NORM=`echo "${EXPO_EVT} ${EXPO_BKG}" | awk '{ printf("%g", $1/$2) }'`
diff --git a/scripts/make_infojson.sh b/scripts/make_infojson.sh
index 17fd2a8..85706c0 100755
--- a/scripts/make_infojson.sh
+++ b/scripts/make_infojson.sh
@@ -10,7 +10,15 @@
# Weitian LI <liweitianux@gmail.com>
# 2014/06/24
#
+#
+VERSION="v2.0"
+UPDATED="2015/06/03"
+#
# ChangeLog:
+# v2.0, 2015/06/03, Aaron LI
+# * Copy needed pfiles to tmp directory,
+# set environment variable $PFILES to use these first.
+# and remove them after usage.
# v1.1, 2014/12/05, Weitian LI
# regex '\s+' not supported by old grep version, change to use '[[:space:]]+'
#
@@ -20,7 +28,10 @@ case "$1" in
-[hH]*|--[hH]*)
printf "usage:\n"
printf " `basename $0` evtdir=<evt_dir> spcdir=<spc_dir> imgdir=<img_dir> massdir=<mass_dir> json=<info_json>\n"
- printf "NOTE: run this script in dir 'repro/'\n"
+ printf "\nNOTE:\n"
+ printf " run this script in dir 'repro/'\n"
+ printf "\nversion:\n"
+ printf " ${VERSION}, ${UPDATED}\n"
exit 1
;;
esac
@@ -126,6 +137,22 @@ fi
printf "## use massdir: \`${MASS_DIR}'\n"
## }}}
+## prepare parameter files (pfiles) {{{
+CIAO_TOOLS="dmkeypar"
+
+PFILES_TMPDIR="/tmp/pfiles-$$"
+[ -d "${PFILES_TMPDIR}" ] && rm -rf ${PFILES_TMPDIR} || mkdir ${PFILES_TMPDIR}
+
+# Copy necessary pfiles for localized usage
+for tool in ${CIAO_TOOLS}; do
+ pfile=`paccess ${tool}`
+ punlearn ${tool} && cp -Lvf ${pfile} ${PFILES_TMPDIR}/
+done
+
+# Modify environment variable 'PFILES' to use local pfiles first
+export PFILES="${PFILES_TMPDIR}:${PFILES}"
+## pfiles }}}
+
# set files
EVT2_CLEAN=`readlink -f ${EVT_DIR}/evt2*_clean*.fits | head -n 1`
MASS_SBP_CFG=`readlink -f ${MASS_DIR}/source.cfg`
@@ -159,3 +186,6 @@ cat > ${JSON_FILE} << _EOF_
},
_EOF_
+# clean pfiles
+rm -rf ${PFILES_TMPDIR}
+