path: root/scripts/ciao_calc_ct.sh
author    Weitian LI <liweitianux@gmail.com>    2014-06-18 22:20:59 +0800
committer Weitian LI <liweitianux@gmail.com>    2014-06-18 22:20:59 +0800
commit    e3923265d0d6949407a83726e9a9bd5d97079221 (patch)
tree      9afd8520595f4cf80815b9bccfc3dcf2879ebe47 /scripts/ciao_calc_ct.sh
download  chandra-acis-analysis-e3923265d0d6949407a83726e9a9bd5d97079221.tar.bz2
Initial commit
Added files:
* mass_profile: developed by Junhua GU, modified by Weitian LI
* opt_utilities: developed by Junhua GU
* tools/cosmo_calc: originated from 'calc_distance', modified
* scripts: scripts used to process Chandra ACIS data
* files: useful files used in processing
* HOWTO_chandra_acis_process.txt
* README.md
Diffstat (limited to 'scripts/ciao_calc_ct.sh')
-rwxr-xr-x  scripts/ciao_calc_ct.sh  593
1 file changed, 593 insertions, 0 deletions
diff --git a/scripts/ciao_calc_ct.sh b/scripts/ciao_calc_ct.sh
new file mode 100755
index 0000000..6fb1d79
--- /dev/null
+++ b/scripts/ciao_calc_ct.sh
@@ -0,0 +1,593 @@
+#!/bin/sh
+#
+unalias -a
+export LC_COLLATE=C
+###########################################################
+## based on `ciao_r500avgt' ##
+## for calculating the `cooling time' ##
+## within the (0-0.048 r500) region ##
+## ##
+## Junhua Gu ##
+## August 22, 2012 ##
+## ##
+## LIweitiaNux ##
+## 2013/04/28 ##
+###########################################################
+
+###########################################################
+## ChangeLogs
+## 2014/06/18: 'cooling_time2.sh' -> 'ciao_calc_ct.sh'
+###########################################################
+
+## cosmology calculator {{{
+## XXX: MODIFY THIS TO YOUR OWN CASE
+## and make sure this calculator is executable
+## NOTE: use `$HOME' instead of `~' in paths
+BASE_PATH=`dirname $0`
+COSCALC="`which cosmo_calc calc_distance | head -n 1`"
+if [ -z "${COSCALC}" ] || [ ! -x "${COSCALC}" ]; then
+ printf "ERROR: COSCALC \`${COSCALC}' not found or not executable\n"
+ exit 255
+fi
+## }}}
+
+## about, used in `usage' {{{
+VERSION="v1.1"
+UPDATE="2012-08-26"
+## about }}}
+
+## error code {{{
+ERR_USG=1
+ERR_DIR=11
+ERR_EVT=12
+ERR_BKG=13
+ERR_REG=14
+ERR_ASOL=21
+ERR_BPIX=22
+ERR_PBK=23
+ERR_MSK=24
+ERR_BKGTY=31
+ERR_SPEC=32
+ERR_ARF=51
+ERR_RMF=52
+ERR_UNI=61
+ERR_JSON=62
+## error code }}}
+
+## usage, help {{{
+case "$1" in
+ -[hH]*|--[hH]*)
+ printf "usage:\n"
+ printf " `basename $0` evt=<evt2_clean> r500=<r500_kpc> regin=<input_reg> regout=<output_reg> bkgd=<blank_evt | lbkg_reg | bkg_spec> nh=<nH> z=<redshift> arf=<warf_file> rmf=<wrmf_file> [ grpcmd=<grppha_cmd> log=<log_file> ]\n"
+ printf "\nversion:\n"
+ printf "${VERSION}, ${UPDATE}\n"
+ exit ${ERR_USG}
+ ;;
+esac
+## usage, help }}}
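+
+# An illustrative invocation (all file names and values below are examples
+# only, not taken from any real observation):
+#   ./ciao_calc_ct.sh evt=evt2_c7_clean.fits r500=1300kpc regin=rspec.reg \
+#       regout=cooling.reg bkgd=bkgcorr_blanksky_c7.pi nh=0.05 z=0.08 \
+#       arf=r1_c7.warf rmf=r1_c7.wrmf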
+
+## default parameters {{{
+# default `event file' which is used to match `blanksky' files
+#DFT_EVT="_NOT_EXIST_"
+DFT_EVT="`ls evt2*_clean.fits 2> /dev/null`"
+# default `bkgd', use `bkgcorr_blanksky*' corrected bkg spectrum
+DFT_BKGD="`ls bkgcorr_blanksky_*.pi 2> /dev/null`"
+# default basedir
+DFT_BASEDIR="../.."
+# default info_json pattern
+DFT_JSON_PAT="*_INFO.json"
+# default `radial region file'
+#DFT_REG_IN="_NOT_EXIST_"
+DFT_REG_IN="rspec.reg"
+# default output region file (0-0.048 r500 region)
+DFT_REG_OUT="cooling.reg"
+# default ARF/RMF, those of the outermost region
+DFT_ARF="`ls r1_*.warf 2> /dev/null`"
+DFT_RMF="`ls r1_*.wrmf 2> /dev/null`"
+
+# default `group command' for `grppha'
+#DFT_GRP_CMD="group 1 128 2 129 256 4 257 512 8 513 1024 16"
+DFT_GRP_CMD="group min 20"
+# default `log file'
+DFT_LOGFILE="cooling_`date '+%Y%m%d'`.log"
+
+INNER=0.0
+OUTER=0.048
+CT_RES="cooling_results.txt"
+# default output xspec scripts
+XSPEC_SCRIPT="xspec_cooling.xcm"
+# deproj xspec script, generated by `deproj_spectra'
+# from which `nh' and `redshift' are extracted
+XSPEC_DEPROJ="xspec_deproj.xcm"
+## default parameters }}}
+
+## functions {{{
+# process commandline arguments
+# cmdline arg format: `KEY=VALUE'
+getopt_keyval() {
+ until [ -z "$1" ]
+ do
+ key=${1%%=*} # extract key
+ val=${1#*=} # extract value
+ keyval="${key}=\"${val}\""
+ echo "## getopt: eval '${keyval}'"
+ eval ${keyval}
+ shift # shift, process next one
+ done
+}
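+# For example (hypothetical arguments), calling
+#     getopt_keyval evt=evt2_clean.fits z=0.08
+# evaluates `evt="evt2_clean.fits"' and `z="0.08"', i.e. each KEY=VALUE
+# pair becomes a shell variable of the same name.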
+
+## background renormalization (BACKSCAL) {{{
+# renorm background according to particle background
+# energy range: 9.5-12.0 keV (channel: 651-822)
+CH_LOW=651
+CH_HI=822
+pb_flux() {
+ punlearn dmstat
+ COUNTS=`dmstat "$1[channel=${CH_LOW}:${CH_HI}][cols COUNTS]" | grep -i 'sum:' | awk '{ print $2 }'`
+ punlearn dmkeypar
+ EXPTIME=`dmkeypar $1 EXPOSURE echo=yes`
+ BACK=`dmkeypar $1 BACKSCAL echo=yes`
+ # fix `scientific notation' bug for `bc'
+ EXPTIME_B=`echo ${EXPTIME} | sed 's/[eE]/\*10\^/' | sed 's/+//'`
+ BACK_B=`echo "( ${BACK} )" | sed 's/[eE]/\*10\^/' | sed 's/+//'`
+ PB_FLUX=`echo "scale = 16; ${COUNTS} / ${EXPTIME_B} / ${BACK_B}" | bc -l`
+ echo ${PB_FLUX}
+}
+
+bkg_renorm() {
+ # $1: src spectrum, $2: back spectrum
+ PBFLUX_SRC=`pb_flux $1`
+ PBFLUX_BKG=`pb_flux $2`
+ BACK_OLD=`dmkeypar $2 BACKSCAL echo=yes`
+ BACK_OLD_B=`echo "( ${BACK_OLD} )" | sed 's/[eE]/\*10\^/' | sed 's/+//'`
+ BACK_NEW=`echo "scale = 16; ${BACK_OLD_B} * ${PBFLUX_BKG} / ${PBFLUX_SRC}" | bc -l`
+ printf "\`$2': BACKSCAL:\n"
+ printf " ${BACK_OLD} --> ${BACK_NEW}\n"
+ punlearn dmhedit
+ dmhedit infile=$2 filelist=none operation=add \
+ key=BACKSCAL value=${BACK_NEW} comment="old value: ${BACK_OLD}"
+}
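+#
+# Sketch of the renormalization above (editorial summary, not part of the
+# original header): for each spectrum,
+#     pb_flux = COUNTS(9.5-12 keV) / EXPOSURE / BACKSCAL
+# and the background BACKSCAL is rescaled so that the particle-background
+# flux of the background spectrum matches that of the source spectrum:
+#     BACKSCAL_new = BACKSCAL_old * pb_flux(bkg) / pb_flux(src)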
+## bkg renorm }}}
+## functions end }}}
+
+## parameters {{{
+# process cmdline args using `getopt_keyval'
+getopt_keyval "$@"
+
+## check log parameters {{{
+if [ ! -z "${log}" ]; then
+ LOGFILE="${log}"
+else
+ LOGFILE=${DFT_LOGFILE}
+fi
+printf "## use logfile: \`${LOGFILE}'\n"
+[ -e "${LOGFILE}" ] && mv -fv ${LOGFILE} ${LOGFILE}_bak
+TOLOG="tee -a ${LOGFILE}"
+echo "process script: `basename $0`" >> ${LOGFILE}
+echo "process date: `date`" >> ${LOGFILE}
+## log }}}
+
+# check given parameters
+# process `nh' and `redshift' {{{
+if [ ! -r "${XSPEC_DEPROJ}" ]; then
+ printf "## the script \`${XSPEC_DEPROJ}' generated by \`deproj_spectra' NOT found\n" | ${TOLOG}
+ printf "## please supply the value of \`nh' and \`redshift' manully\n" | ${TOLOG}
+ read -p "> value of nh: " N_H
+ read -p "> value of redshift: " REDSHIFT
+else
+ # get `nh' and `redshift' from xspec script
+ LN=`grep -n 'projct\*wabs\*apec' ${XSPEC_DEPROJ} | tail -n 1 | cut -d':' -f1`
+ # calc the number of the line which contains `nh'
+ LN_NH=`expr ${LN} + 4`
+ NH_XCM=`head -n ${LN_NH} ${XSPEC_DEPROJ} | tail -n 1 | awk '{ print $1 }'`
+ # calc the line number of `redshift'
+ LN_RS=`expr ${LN} + 7`
+ RS_XCM=`head -n ${LN_RS} ${XSPEC_DEPROJ} | tail -n 1 | awk '{ print $1 }'`
+ printf "## get value of nh: \`${NH_XCM}' (from \`${XSPEC_DEPROJ}')\n" | ${TOLOG}
+ printf "## get value of redshift: \`${RS_XCM}' (from \`${XSPEC_DEPROJ}')\n" | ${TOLOG}
+
+ ## if `nh' and `redshift' supplied in cmdline, then use them
+ if [ ! -z "${nh}" ]; then
+ N_H=${nh}
+ else
+ N_H=${NH_XCM}
+ fi
+ # redshift
+ if [ ! -z "${z}" ]; then
+ REDSHIFT=${z}
+ else
+ REDSHIFT=${RS_XCM}
+ fi
+fi
+printf "## use nH: ${N_H}\n" | ${TOLOG}
+printf "## use redshift: ${REDSHIFT}\n" | ${TOLOG}
+# nh & redshift }}}
+
+# check basedir & json file
+if [ -d "${basedir}" ]; then
+ BASEDIR=${basedir}
+else
+ BASEDIR=${DFT_BASEDIR}
+fi
+if [ ! -z "${json}" ] && [ -r "${BASEDIR}/${json}" ]; then
+ JSON_FILE="${BASEDIR}/${json}"
+elif [ `ls ${BASEDIR}/${DFT_JSON_PAT} 2> /dev/null | wc -l` -eq 1 ]; then
+ JSON_FILE=`ls ${BASEDIR}/${DFT_JSON_PAT}`
+else
+ read -p "> JSON_file: " JSON_FILE
+ if [ ! -r "${JSON_FILE}" ]; then
+ printf "ERROR: cannot access given \`${JSON_FILE}'\n"
+ exit ${ERR_JSON}
+ fi
+fi
+printf "## use json_file: \`${JSON_FILE}'\n" | ${TOLOG}
+
+# process `r500' {{{
+R500_RAW=`grep '"R500.*kpc' ${JSON_FILE} | sed 's/.*"R500.*":\ //' | sed 's/\ *,$//'`
+if [ ! -z "${r500}" ]; then
+ R500_RAW=${r500}
+fi
+if [ -z "${R500_RAW}" ]; then
+ printf "## input R500 followed with unit, e.g.: 800kpc, 400pix\n"
+ read -p "> value of \`R500' (in pixel/kpc): " R500_RAW
+fi
+R500_VAL=`echo "${R500_RAW}" | tr -d 'a-zA-Z, '`
+R500_UNI=`echo "${R500_RAW}" | tr -d '0-9, '`
+printf "## get \`R500': ${R500_VAL} in unit \`${R500_UNI}'\n" | ${TOLOG}
+
+# if in kpc, convert to pix
+case "${R500_UNI}" in
+ [pP]*)
+ printf "## units in \`pixel', conversion not needed\n" | ${TOLOG}
+ R500_PIX_B=`echo ${R500_VAL} | sed 's/[eE]/\*10\^/' | sed 's/+//'`
+ ;;
+ *)
+ printf "## units in \`kpc', convert to \`Chandra pixel'\n" | ${TOLOG}
+ KPC_PER_PIX=`${COSCALC} ${REDSHIFT} | grep 'kpc.*pix' | tr -d 'a-zA-Z_#=(),:/ '`
+ # convert scientific notation for `bc'
+ KPC_PER_PIX_B=`echo ${KPC_PER_PIX} | sed 's/[eE]/\*10\^/' | sed 's/+//'`
+ printf "## calculated \`kpc/pixel': ${KPC_PER_PIX_B}\n"
+ R500_VAL_B=`echo ${R500_VAL} | sed 's/[eE]/\*10\^/' | sed 's/+//'`
+ R500_PIX_B=`echo "scale = 4; ${R500_VAL_B} / ( ${KPC_PER_PIX_B} )" | bc -l`
+ ;;
+esac
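+# Note on the kpc -> pixel conversion above: one ACIS pixel is 0.492 arcsec,
+# so kpc/pixel = D_A[kpc] * (0.492/3600) * (pi/180); the cosmology calculator
+# is assumed to print this ratio on its `kpc ... pix' line.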
+# calc (inner-outer R500)
+R_IN=`echo "scale = 4; ${INNER} * ${R500_PIX_B}" | bc -l`
+R_OUT=`echo "scale = 4; ${OUTER} * ${R500_PIX_B}" | bc -l`
+printf "## R500 in units pixel: ${R500_PIX_B}\n" | ${TOLOG}
+printf "## (${INNER}-${OUTER} R500) range in pixel: ${R_IN} - ${R_OUT}\n" | ${TOLOG}
+# r500 }}}
+
+# check evt file
+if [ -r "${evt}" ]; then
+ EVT=${evt}
+elif [ -r "${DFT_EVT}" ]; then
+ EVT=${DFT_EVT}
+else
+ read -p "> clean evt2 file: " EVT
+ if [ ! -r "${EVT}" ]; then
+ printf "ERROR: cannot access given \`${EVT}' evt file\n"
+ exit ${ERR_EVT}
+ fi
+fi
+printf "## use evt file: \`${EVT}'\n" | ${TOLOG}
+
+# input and output region files {{{
+if [ -r "${regin}" ]; then
+ REG_IN="${regin}"
+elif [ -r "${DFT_REG_IN}" ]; then
+ REG_IN=${DFT_REG_IN}
+else
+ read -p "> previous used radial spec regfile: " REG_IN
+ if [ ! -r "${REG_IN}" ]; then
+ printf "ERROR: cannot access given \`${REG_IN}' region file\n"
+ exit ${ERR_REG}
+ fi
+fi
+printf "## use previous regfile: \`${REG_IN}'\n" | ${TOLOG}
+if [ ! -z "${regout}" ]; then
+ REG_OUT="${regout}"
+else
+ REG_OUT=${DFT_REG_OUT}
+fi
+[ -e "${REG_OUT}" ] && mv -fv ${REG_OUT} ${REG_OUT}_bak
+printf "## set output regfile (0.1-0.5 r500 region): \`${REG_OUT}'\n" | ${TOLOG}
+
+# get center position from `regin'
+# only consider `pie' or `annulus'-shaped region
+TMP_REG=`grep -iE '(pie|annulus)' ${REG_IN} | head -n 1`
+XC=`echo ${TMP_REG} | tr -d ' ' | awk -F'[(),]' '{ print $2 }'`
+YC=`echo ${TMP_REG} | tr -d ' ' | awk -F'[(),]' '{ print $3 }'`
+printf "## get center coord: (${XC},${YC})\n" | ${TOLOG}
+# region files }}}
+
+# check given bkgd, determine background {{{
+if [ -r "${bkgd}" ]; then
+ BKGD=${bkgd}
+elif [ -r "${DFT_BKGD}" ]; then
+ BKGD=${DFT_BKGD}
+else
+ read -p "> background (blanksky_evt | lbkg_reg | bkg_spec): " BKGD
+ if [ ! -r "${BKGD}" ]; then
+ printf "ERROR: cannot access given \`${BKGD}'\n"
+ exit ${ERR_BKG}
+ fi
+fi
+printf "## use bkgd: \`${BKGD}'\n" | ${TOLOG}
+# determine bkg type: blanksky, lbkg_reg, bkg_spec ?
+# according to file type first: text / FITS
+# if FITS, then get values of `HDUCLAS1' and `OBJECT'
+if file -bL ${BKGD} | grep -qi 'text'; then
+ printf "## given \`${BKGD}' is a \`text file'\n"
+ printf "## use it as local bkg region file\n"
+ printf "## use *LOCAL BKG SPEC*\n" | ${TOLOG}
+ # just set flags, extract spectrum later
+ USE_LBKG_REG=YES
+ USE_BLANKSKY=NO
+ USE_BKG_SPEC=NO
+elif file -bL ${BKGD} | grep -qi 'FITS'; then
+ printf "## given \`${BKGD}' is a \`FITS file'\n"
+ # get FITS header keyword
+ HDUCLAS1=`dmkeypar ${BKGD} HDUCLAS1 echo=yes`
+ if [ "${HDUCLAS1}" = "EVENTS" ]; then
+ # event file
+ printf "## given file is \`event'\n"
+ # check if `blanksky' or `stowed bkg'
+ BKG_OBJ=`dmkeypar ${BKGD} OBJECT echo=yes`
+ if [ "${BKG_OBJ}" = "BACKGROUND DATASET" ] || [ "${BKG_OBJ}" = "ACIS STOWED" ]; then
+ # valid bkg evt file
+ printf "## given FITS file is a valid bkgrnd file\n"
+ printf "## use *BLANKSKY*\n" | ${TOLOG}
+ USE_BLANKSKY=YES
+ USE_LBKG_REG=NO
+ USE_BKG_SPEC=NO
+ # specify `BLANKSKY'
+ BLANKSKY=${BKGD}
+ else
+ # invalid bkg evt file
+ printf "ERROR: invalid bkg evt file given\n"
+ exit ${ERR_BKGTY}
+ fi
+ elif [ "${HDUCLAS1}" = "SPECTRUM" ]; then
+ # spectrum file
+ printf "## given file is \`spectrum'\n"
+ printf "## use *BKG SPECTRUM*\n" | ${TOLOG}
+ USE_BKG_SPEC=YES
+ USE_BLANKSKY=NO
+ USE_LBKG_REG=NO
+ # specify `BKG_SPEC'
+ BKG_SPEC=${BKGD}
+ else
+ # other type
+ printf "ERROR: other type FITS given\n"
+ exit ${ERR_BKGTY}
+ fi
+else
+ printf "ERROR: given \`${BKGD}' type UNKNOWN\n"
+ exit ${ERR_BKGTY}
+fi
+# bkgd }}}
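+# (Editorial summary of the branching above:
+#    text file                                  -> local bkg region    (USE_LBKG_REG)
+#    FITS EVENTS with OBJECT "BACKGROUND DATASET"
+#      or "ACIS STOWED"                         -> blanksky events     (USE_BLANKSKY)
+#    FITS SPECTRUM                              -> background spectrum (USE_BKG_SPEC)
+#  anything else is rejected with ERR_BKGTY.)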
+
+# check `arf' and `rmf' {{{
+if [ -r "${arf}" ]; then
+ ARF=${arf}
+elif [ -r "${DFT_ARF}" ]; then
+ ARF=${DFT_ARF}
+else
+ read -p "> provide the ARF to use: " ARF
+ if [ ! -r "${ARF}" ]; then
+ printf "ERROR: cannot access given \`${ARF}'\n"
+ exit ${ERR_ARF}
+ fi
+fi
+printf "## use ARF: \`${ARF}'\n" | ${TOLOG}
+# rmf
+if [ -r "${rmf}" ]; then
+ RMF=${rmf}
+elif [ -r "${DFT_RMF}" ]; then
+ RMF=${DFT_RMF}
+else
+ read -p "> provide the RMF to use: " RMF
+ if [ ! -r "${RMF}" ]; then
+ printf "ERROR: cannot access given \`${RMF}'\n"
+ exit ${ERR_RMF}
+ fi
+fi
+printf "## use RMF: \`${RMF}'\n" | ${TOLOG}
+# arf & rmf }}}
+
+# check given `grpcmd'
+if [ ! -z "${grpcmd}" ]; then
+ GRP_CMD="${grpcmd}"
+else
+ GRP_CMD="${DFT_GRP_CMD}"
+fi
+printf "## use grppha cmd: \`${GRP_CMD}'\n" | ${TOLOG}
+## parameters }}}
+
+##################################################
+#### main
+## D_A
+#D_A_CM=`${COSCALC} ${REDSHIFT} | grep '^d_a_cm' | awk '{ print $2 }'`
+D_A_CM=`${COSCALC} ${REDSHIFT} | grep -i 'd_a.*cm' | awk -F'=' '{ print $2 }' | awk '{ print $1 }'`
+printf "D_A_CM(${REDSHIFT})= ${D_A_CM}\n"
+
+## region related {{{
+## generate the needed region file
+printf "generate the output region file ...\n"
+cat > ${REG_OUT} << _EOF_
+# Region file format: CIAO version 1.0
+pie(${XC},${YC},${R_IN},${R_OUT},0,360)
+_EOF_
+
+## open the evt file to verify or modify
+printf "## check the generated pie region ...\n"
+printf "## if modified, save with the same name \`${REG_OUT}' (overwrite)\n"
+ds9 ${EVT} -regions ${REG_OUT} -cmap sls -bin factor 4
+
+## check the (modified) region (pie region end angle)
+printf "check the above region (for pie region end angle) ...\n"
+INVALID=`grep -i 'pie' ${REG_OUT} | awk -F'[,()]' '$7 > 360'`
+if [ "x${INVALID}" != "x" ]; then
+ printf "*** WARNING: there are pie regions' END_ANGLE > 360\n" | ${TOLOG}
+ printf "*** will to fix ...\n"
+ mv -fv ${REG_OUT} ${REG_OUT}_tmp
+ # using `awk' to fix
+ awk -F'[,()]' '{
+ if ($7 > 360) {
+ printf "%s(%.2f,%.2f,%.2f,%.2f,%.2f,%.2f)\n", $1,$2,$3,$4,$5,$6,($7-360)
+ }
+ else {
+ print $0
+ }
+ }' ${REG_OUT}_tmp > ${REG_OUT}
+ rm -f ${REG_OUT}_tmp
+fi
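+# For example, an (illustrative) line `pie(4096.5,4012.3,0,80.0,30,390)'
+# would be rewritten by the awk filter above as
+# `pie(4096.50,4012.30,0.00,80.00,30.00,30.00)'.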
+## region related }}}
+
+## generate spectrum {{{
+# check counts
+punlearn dmlist
+CNT_RC=`dmlist infile="${EVT}[sky=region(${REG_OUT})][energy=700:7000]" opt=block | grep 'EVENTS' | awk '{ print $8 }'`
+if [ ${CNT_RC} -lt 500 ]; then
+ F_WC=true
+ WC="LOW_COUNTS"
+ printf "*** WARNING: counts_in_0.048R500=${CNT_RC} < 500 ***\n"
+fi
+
+# object
+printf "extract object spectrum \`${AVGT_SPEC}' ...\n"
+AVGT_SPEC="${REG_OUT%.reg}.pi"
+AVGT_SPEC_GRP="${AVGT_SPEC%.pi}_grp.pi"
+punlearn dmextract
+dmextract infile="${EVT}[sky=region(${REG_OUT})][bin PI]" \
+ outfile="${AVGT_SPEC}" wmap="[bin det=8]" clobber=yes
+# group spectrum
+printf "group object spectrum ...\n"
+grppha infile="${AVGT_SPEC}" outfile="${AVGT_SPEC_GRP}" \
+ comm="${GRP_CMD} & exit" clobber=yes > /dev/null
+
+# background
+printf "generate the background spectrum ...\n"
+AVGT_BKG="${AVGT_SPEC%.pi}_bkg.pi"
+if [ "${USE_BLANKSKY}" = "YES" ]; then
+ # use blanksky as background file
+ printf "extract spectrum from blanksky ...\n"
+ punlearn dmextract
+ dmextract infile="${BLANKSKY}[sky=region(${REG_OUT})][bin PI]" \
+ outfile=${AVGT_BKG} wmap="[bin det=8]" clobber=yes
+elif [ "${USE_LBKG_REG}" = "YES" ]; then
+ printf "extract local background ...\n"
+ punlearn dmextract
+ dmextract infile="${EVT}[sky=region(${BKGD})][bin PI]" \
+ outfile=${AVGT_BKG} wmap="[bin det=8]" clobber=yes
+elif [ "${USE_BKG_SPEC}" = "YES" ]; then
+ printf "copy specified background spectrum ...\n"
+ cp -fv ${BKG_SPEC} ${AVGT_BKG}
+fi
+
+printf "renormalize the background ...\n"
+bkg_renorm ${AVGT_SPEC} ${AVGT_BKG}
+## spectrum }}}
+
+## generate XSPEC script {{{
+printf "generate a XSPEC script ...\n"
+[ -e "${XSPEC_SCRIPT}" ] && mv -fv ${XSPEC_SCRIPT} ${XSPEC_SCRIPT}_bak
+cat > ${XSPEC_SCRIPT} << _EOF_
+## XSPEC script
+## spectral analysis to calculate the cooling time within the (0-0.048 R500) region
+##
+## generated by: \``basename $0`'
+## date: \``date`'
+##
+
+# xspec settings
+statistic chi
+abund grsa
+query yes
+
+# data
+data ${AVGT_SPEC_GRP}
+response ${RMF}
+arf ${ARF}
+backgrnd ${AVGT_BKG}
+
+# fitting range
+ignore bad
+ignore 0.0-0.7,7.0-**
+
+# plot related
+setplot energy
+
+method leven 10 0.01
+xsect bcmc
+cosmo 70 0 0.73
+xset delta 0.01
+systematic 0
+
+# model
+model wabs*apec
+ ${N_H} -0.001 0 0 100000 1e+06
+ 1.0 0.01 0.008 0.008 64 64
+ 0.4 0.001 0 0 5 5
+ ${REDSHIFT} -0.01 -0.999 -0.999 10 10
+ 0.0 0.01 0 0 1e+24 1e+24
+
+## xspec script end
+
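+# Editorial note on the proc below: it estimates the cooling time from the
+# fitted apec parameters as
+#     V      = (4/3) * pi * r_out^3      (spherical volume, r_out in cm)
+#     L_X    = flux * 4 * pi * D_L^2     (0.001-100 keV)
+#     t_cool = (3/2) * (n_e + n_H) * k_B * T * V / L_X
+# with n_e/n_H = 1.2 and T in keV, converted to erg via 1 keV = 1.602e-9 erg.
+#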
+proc calc_cooling_time {} {
+ set rout ${R_OUT}
+ set d_a_cm ${D_A_CM}
+ fit 1000
+ tclout param 4
+ set z [ lindex \$xspec_tclout 0 ]
+ tclout param 2
+ set T [ lindex \$xspec_tclout 0 ]
+ tclout param 5
+ set norm [ lindex \$xspec_tclout 0 ]
+ newpar 1 0
+ dummyrsp .001 100
+ flux .001 100
+
+ tclout flux
+ set flux [ lindex \$xspec_tclout 0 ]
+ puts "flux(0.01-100kev): \$flux"
+ set rout_cm [ expr \$rout*.492/3600/180*3.14159*\$d_a_cm ]
+ set V [ expr 4./3.*3.14159*\$rout_cm*\$rout_cm*\$rout_cm ]
+ set nenh [ expr \$norm*1E14*4*3.14159*\$d_a_cm*\$d_a_cm*(1+\$z)*(1+\$z)*(1+\$z)*(1+\$z)/\$V ]
+ set d_l_cm [ expr \$d_a_cm*(1+\$z)*(1+\$z) ]
+ set ne_np_ratio 1.2
+ set ne [ expr sqrt(\$nenh*\$ne_np_ratio) ]
+ set lx [ expr \$flux*4*3.14159*\$d_l_cm*\$d_l_cm ]
+ set kb 1.602E-9
+ set ct [ expr 3./2.*(\$ne+\$ne/\$ne_np_ratio)*\$kb*\$T*\$V/\$lx ]
+ set ct_gyr [ expr \$ct/(3600*24*365.25*1E9) ]
+ puts "Cooling_time= \$ct_gyr Gyr"
+}
+
+fit 1000
+calc_cooling_time
+
+tclexit
+_EOF_
+## xspec script }}}
+
+## invoke xspec to calc
+if [ "x${F_WC}" = "xtrue" ]; then
+ printf "\n*** WC: LOW_COUNTS ***\n"
+ printf "*** WARNING: counts_in_0.048R500=${CNT_RC} < 500 ***\n"
+else
+ [ -e "${CT_RES}" ] && mv -f ${CT_RES} ${CT_RES}_bak
+ printf "invoking XSPEC to calculate cooling time ...\n"
+ xspec - ${XSPEC_SCRIPT} | tee ${CT_RES}
+
+ OBS_ID=`grep '"Obs.*ID' ${JSON_FILE} | awk -F':' '{ print $2 }' | tr -d ' ,'`
+ OBJ_NAME=`grep '"Source\ Name' ${JSON_FILE} | awk -F':' '{ print $2 }' | sed -e 's/\ *"//' -e 's/"\ *,$//'`
+ CT=`grep -i '^Cooling_time' ${CT_RES} | awk '{ print $2 }'`
+
+ printf "\n" | tee -a ${CT_RES}
+ printf "# OBS_ID,OBJ_NAME,CT_gyr\n" | tee -a ${CT_RES}
+ printf "# $OBS_ID,$OBJ_NAME,$CT\n" | tee -a ${CT_RES}
+fi
+
+exit 0
+