author    Aaron LI <aly@aaronly.me>  2017-11-16 23:40:30 +0800
committer Aaron LI <aly@aaronly.me>  2017-11-16 23:40:30 +0800
commit    4037655221275dd8ff1f5f4afa342cf73b68161a (patch)
tree      0229f1440cddcb44766a68f5885ee10a71a2f529 /astro
parent    5dbb18fcc00813bfbbc0e87fc6f95a9b2c58b5fe (diff)
download  atoolbox-4037655221275dd8ff1f5f4afa342cf73b68161a.tar.bz2
astro/xmm: Add several previously developed scripts
Diffstat (limited to 'astro')
-rwxr-xr-x  astro/xmm/epic_repro.sh          83
-rwxr-xr-x  astro/xmm/make_regfits.sh        49
-rwxr-xr-x  astro/xmm/make_spectrum.sh       95
-rwxr-xr-x  astro/xmm/mos_source_detect.sh  212
-rwxr-xr-x  astro/xmm/sky2det.sh             53
-rw-r--r--  astro/xmm/sky2det_manual.sh      80
-rw-r--r--  astro/xmm/xspec_bkgcorr_xmm.tcl 447
7 files changed, 1019 insertions, 0 deletions
diff --git a/astro/xmm/epic_repro.sh b/astro/xmm/epic_repro.sh
new file mode 100755
index 0000000..9dfce84
--- /dev/null
+++ b/astro/xmm/epic_repro.sh
@@ -0,0 +1,83 @@
+#!/bin/sh
+#
+##################################################################
+## reprocess data ##
+## apply the most recent calibrations ##
+## ##
+## LIweitiaNux, February 17, 2012 ##
+##################################################################
+
+## init SAS if needed
+if ! echo $PATH | tr ':' '\n' | grep 'xmmsas' > /dev/null; then
+ sasinit > /dev/null # 'sasinit' is a func defined in ~/.bashrc
+fi
+
+## set needed variables
+WD=`pwd -P`
+ODF=${WD}/ODF
+if ! [ -d ${ODF} ]; then
+ printf "ERROR: NOT found 'ODF' directory in the current place\n"
+ printf " You may in wrong directory\n"
+ exit 1
+fi
+export SAS_ODF=${ODF}
+export SAS_CCF=${ODF}/ccf.cif
+PPS=${WD}/PPS # dir to save processed files
+PROC=${WD}/PROC # processing dir
+[ -d ${PPS} ] && rm -f ${PPS}/* || mkdir ${PPS}
+[ -d ${PROC} ] && rm -f ${PROC}/* || mkdir ${PROC}
+printf "ODF=${ODF}\n"
+printf "PPS=${PPS}\n"
+printf "PROC=${PROC}\n"
+
+## prepare the data
+## Ref: http://heasarc.gsfc.nasa.gov/docs/xmm/abc/node7.html
+cd ${ODF}
+[ -e ./ccf.cif ] && rm -f ./ccf.cif
+cifbuild # generate new 'ccf.cif'
+if ls *SUM.SAS > /dev/null 2>&1; then
+ rm -f *SUM.SAS
+fi
+odfingest # generate new '*SUM.SAS'
+export SAS_ODF=${ODF}/`ls *SUM.SAS`
+
+## rerunning the pipeline
+## Ref: http://heasarc.gsfc.nasa.gov/docs/xmm/abc/node8.html
+cd ${PPS}
+printf "enter directory ${PPS}\n"
+printf "rerunning the pipeline ...\n"
+printf "run 'emproc' to processe MOS* data ...\n"
+emproc # process 'MOS*' data
+printf "run 'epproc' to processe PN data ...\n"
+epproc # process 'PN' data
+
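+# Derive the observation ID and the filenames of the pipeline products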
+OBS_ID=`ls *MOS1*ImagingEvts* | sed 's/_EMOS.*//' | sed 's/[0-9]*_//'`
+MOS1_EVT=`ls *EMOS1*ImagingEvts*`
+MOS2_EVT=`ls *EMOS2*ImagingEvts*`
+PN_EVT=`ls *PN*ImagingEvts*`
+ATTHK=`ls *AttHk*`
+
+## store variables for later use
+VARS_FILE=${WD}/variables
+printf "save some variables to file '${VARS_FILE}' for later use ...\n"
+[ -e ${VARS_FILE} ] && mv -f ${VARS_FILE} ${VARS_FILE}.bak
+touch ${VARS_FILE}
+printf "#!/bin/sh\n\n" >> ${VARS_FILE}
+printf "# `date`\n\n" >> ${VARS_FILE}
+printf "export SAS_CCFPATH=${SAS_CCFPATH}\n" >> ${VARS_FILE}
+printf "export SAS_CCF=${SAS_CCF}\n" >> ${VARS_FILE}
+printf "export SAS_ODF=${SAS_ODF}\n\n" >> ${VARS_FILE}
+printf "WORK_DIR=${WD}\n" >> ${VARS_FILE}
+printf "ODF=${ODF}\n" >> ${VARS_FILE}
+printf "PPS=${PPS}\n" >> ${VARS_FILE}
+printf "PROC=${PROC}\n\n" >> ${VARS_FILE}
+printf "OBS_ID=${OBS_ID}\n" >> ${VARS_FILE}
+printf "MOS1_EVT=${MOS1_EVT}\n" >> ${VARS_FILE}
+printf "MOS2_EVT=${MOS2_EVT}\n" >> ${VARS_FILE}
+printf "PN_EVT=${PN_EVT}\n" >> ${VARS_FILE}
+printf "ATTHK=${ATTHK}\n\n" >> ${VARS_FILE}
+printf "DONE\n"
+
+printf "!!!ALL FINISHED!!!
+exit 0
+
diff --git a/astro/xmm/make_regfits.sh b/astro/xmm/make_regfits.sh
new file mode 100755
index 0000000..9a38e50
--- /dev/null
+++ b/astro/xmm/make_regfits.sh
@@ -0,0 +1,49 @@
+#!/bin/sh
+#
+# Make a new region FITS file based on the original bkg-region, which
+# is created by SAS tool 'region', and custom ds9 region list.
+# The newly created region FITS file can be used to create a new mask.
+#
+# Aaron LI
+# Created: 2015-11-06
+# Updated: 2015-11-09
+#
+# ChangeLog:
+# * Update sed match pattern to allow '-' minus sign
+#
+
+if [ $# -ne 3 ]; then
+ echo "Usage:"
+ echo " `basename $0` <orig_reg.fits> <ds9.reg> <new_reg.fits>"
+ echo ""
+ echo "Note: only CIRCLE region are supported!"
+ exit 1
+fi
+
+ORIG_REGFITS="$1"
+DS9REG="$2"
+NEW_REGFITS="$3"
+[ ! -e "${ORIG_REGFITS}" ] && \
+ echo "ERROR: ${ORIG_REGFITS} does not exist" && exit 11
+[ ! -e "${DS9REG}" ] && echo "ERROR: ${DS9REG} does not exist" && exit 12
+[ -e "${NEW_REGFITS}" ] && mv -fv ${NEW_REGFITS} ${NEW_REGFITS}_bak
+
+
+TMP_CDFILE="cdfile.tmp$$"
+TMP_DATAFILE="datafile.tmp$$"
+
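+# Dump the column definitions of the original REGION extension, to be used
+# as the 'cdfile' for 'fcreate' below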
+flcol "${ORIG_REGFITS}[REGION]" > ${TMP_CDFILE}
+sed -i'' '/^___Column_Names__/d' ${TMP_CDFILE}
+
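+# Convert the ds9 CIRCLE regions into 'fcreate' datafile rows; the leading
+# '!' marks them as excluded regions.  (The row layout here is assumed to
+# match the REGION extension written by the SAS 'region' task:
+# SHAPE, X[4], Y[4], R[4], ROTANG[4], COMPONENT.)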
+grep -i '^circle' ${DS9REG} | tr 'a-z' 'A-Z' | \
+ sed -e 's/\(CIRCLE\)(\([0-9.-]\+\),\([0-9.-]\+\),\([0-9.-]\+\))/"!\1" \2 0 0 0 \3 0 0 0 \4 0 0 0 0 0 0 0 1/' | \
+ tr '"' "'" > ${TMP_DATAFILE}
+
+fcreate cdfile=${TMP_CDFILE} datafile=${TMP_DATAFILE} \
+ outfile=${NEW_REGFITS} tbltype="binary" extname="REGION"
+
+cphead "${ORIG_REGFITS}[Primary]" "${NEW_REGFITS}[Primary]"
+cphead "${ORIG_REGFITS}[REGION]" "${NEW_REGFITS}[REGION]"
+
+rm -f ${TMP_CDFILE} ${TMP_DATAFILE}
+
diff --git a/astro/xmm/make_spectrum.sh b/astro/xmm/make_spectrum.sh
new file mode 100755
index 0000000..762f971
--- /dev/null
+++ b/astro/xmm/make_spectrum.sh
@@ -0,0 +1,95 @@
+#!/bin/sh
+#
+# Generate the object and background spectra, as well as the RMF and ARF.
+#
+#
+# Weitian LI
+# Created: 2015-11-09
+# Updated: 2015-11-09
+#
+
+
+case "$1" in
+ -[hH]*|--[hH]*)
+ printf "usage:\n"
+ printf " `basename $0` det=<detector> regtxt=<regtxt> rootname=<rootname> grpcnts=<group_mincounts>\n"
+ exit 1
+ ;;
+esac
+
+## functions {{{
+# process commandline arguments
+# cmdline arg format: `KEY=VALUE'
+getopt_keyval() {
+ until [ -z "$1" ]; do
+ key=${1%%=*} # extract key
+ val=${1#*=} # extract value
+ keyval="${key}=\"${val}\""
+ echo "## getopt: eval '${keyval}'"
+ eval ${keyval}
+ shift # shift, process next one
+ done
+}
+## functions }}}
+
+getopt_keyval "$@"
+
+# Target
+DET="${det}"
+REGTXT="${regtxt}"
+ROOTNAME="${rootname}"
+GRPCNTS="${grpcnts:-50}"
+echo "GRPCNTS: ${GRPCNTS}"
+
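+# Derive the detector type ("mos"/"pn") and the ESAS prefix (e.g., "1S001")
+# from the name of the cleaned event file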
+DET_TYPE=`echo "${DET}" | tr -d '12'`
+PREFIX=`\ls ${DET}*-clean.fits | sed 's/^\(mos\|pn\)\([12]*S[0-9]\{3\}\).*$/\2/'`
+[ ! -e "${REGTXT}" ] && echo "ERROR: ${REGTXT} not exist!" && exit 11
+
+# clean-up previously generated files
+#rm -fv ${DET_TYPE}${PREFIX}-corn.fits
+rm -fv ${DET_TYPE}${PREFIX}-obj-im.fits
+rm -fv ${DET_TYPE}${PREFIX}-exp-im.fits ${DET_TYPE}${PREFIX}-obj-im-sp-det.fits
+rm -fv ${DET_TYPE}${PREFIX}-mask-im.fits
+rm -fv ${DET_TYPE}${PREFIX}-obj.pi ${DET_TYPE}${PREFIX}-back.pi
+rm -fv ${DET_TYPE}${PREFIX}.rmf ${DET_TYPE}${PREFIX}.arf
+
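+# Build the ESAS spectra/background commands; SAS_ESAS_CALDB is expected to
+# point to the ESAS calibration files required by these tasks.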
+if [ "${DET}" = "pn" ]; then
+ rm -fv ${DET_TYPE}${PREFIX}-obj-oot.pi ${DET_TYPE}${PREFIX}-obj-im-oot.fits
+ #rm -fv ${DET_TYPE}${PREFIX}-corn-oot.pi
+ SPEC_CMD="pn-spectra prefix=${PREFIX} region=${REGTXT} caldb=${SAS_ESAS_CALDB} mask=1 elow=0 ehigh=0 quad1=1 quad2=1 quad3=1 quad4=1"
+ BACK_CMD="pn_back prefix=${PREFIX} caldb=${SAS_ESAS_CALDB} elow=0 ehigh=0 quad1=1 quad2=1 quad3=1 quad4=1"
+else
+ SPEC_CMD="mos-spectra prefix=${PREFIX} region=${REGTXT} caldb=${SAS_ESAS_CALDB} mask=1 elow=0 ehigh=0 ccd1=1 ccd2=1 ccd3=1 ccd4=1 ccd5=1 ccd6=1 ccd7=1"
+ BACK_CMD="mos_back prefix=${PREFIX} caldb=${SAS_ESAS_CALDB} elow=0 ehigh=0 ccd1=1 ccd2=1 ccd3=1 ccd4=1 ccd5=1 ccd6=1 ccd7=1"
+fi
+
+eval ${SPEC_CMD}
+eval ${BACK_CMD}
+
+# Rename products
+mv -v ${DET_TYPE}${PREFIX}-obj.pi ${DET}_${ROOTNAME}.pi
+mv -v ${DET_TYPE}${PREFIX}-back.pi ${DET}_${ROOTNAME}_back.pi
+mv -v ${DET_TYPE}${PREFIX}.rmf ${DET}_${ROOTNAME}.rmf
+mv -v ${DET_TYPE}${PREFIX}.arf ${DET}_${ROOTNAME}.arf
+mv -v ${DET_TYPE}${PREFIX}-obj-im-sp-det.fits ${DET}_${ROOTNAME}-sp.fits
+mv -v ${DET_TYPE}${PREFIX}-spec.qdp ${DET}_${ROOTNAME}-spec.qdp
+mv -v ${DET_TYPE}${PREFIX}-aug.qdp ${DET}_${ROOTNAME}-aug.qdp
+if [ "${DET}" = "pn" ]; then
+ mv -v ${DET_TYPE}${PREFIX}-obj-oot.pi ${DET}_${ROOTNAME}-oot.pi
+ mv -v ${DET_TYPE}${PREFIX}-obj-os.pi ${DET}_${ROOTNAME}-os.pi
+ mv -v ${DET_TYPE}${PREFIX}-aug-spec.qdp ${DET}_${ROOTNAME}-aug-spec.qdp
+ mv -v ${DET_TYPE}${PREFIX}-aug-rev-hard.qdp ${DET}_${ROOTNAME}-aug-rev-hard.qdp
+ mv -v ${DET_TYPE}${PREFIX}-aug-rev-rate.qdp ${DET}_${ROOTNAME}-aug-rev-rate.qdp
+fi
+
+# Group spectrum
+specgroup spectrumset="${DET}_${ROOTNAME}.pi" groupedset="${DET}_${ROOTNAME}_grp.pi" rmfset="${DET}_${ROOTNAME}.rmf" arfset="${DET}_${ROOTNAME}.arf" mincounts=${GRPCNTS}
+
+# Clean up again
+rm -fv ${DET_TYPE}${PREFIX}-?ff.pi ${DET_TYPE}${PREFIX}-?fc.pi
+rm -fv ${DET_TYPE}${PREFIX}-?obj.pi ${DET_TYPE}${PREFIX}-?oc.pi
+if [ "${DET}" = "pn" ]; then
+ rm -fv ${DET_TYPE}${PREFIX}-?ff-oot.pi ${DET_TYPE}${PREFIX}-?fc-oot.pi
+ rm -fv ${DET_TYPE}${PREFIX}-?obj-oot.pi ${DET_TYPE}${PREFIX}-?oc-oot.pi
+fi
+
diff --git a/astro/xmm/mos_source_detect.sh b/astro/xmm/mos_source_detect.sh
new file mode 100755
index 0000000..af04e2f
--- /dev/null
+++ b/astro/xmm/mos_source_detect.sh
@@ -0,0 +1,212 @@
+#!/bin/sh
+#
+##################################################################
+## filter, remove source, gti, etc. ##
+## Ref: http://heasarc.gsfc.nasa.gov/docs/xmm/abc/node8.html ##
+## ##
+## LIweitiaNux, February 17, 2012 ##
+##################################################################
+
+usage() {
+ printf "Usage:\n"
+ printf " `basename $0` broad|soft|hard [E_START:E_END]\n"
+}
+
+## process parameters
+if [ $# -eq 1 ]; then
+ E_BAND=$1
+ if [ "${E_BAND}" = "broad" ]; then
+ E_RANGE=300:10000
+ elif [ "${E_BAND}" = "soft" ]; then
+ E_RANGE=300:2000
+ elif [ "${E_BAND}" = "hard" ]; then
+ E_RANGE=2000:10000
+ elif [ "${E_BAND}" = "xid" ]; then
+ E_RANGE=500:4500
+ else
+ printf "ERROR: unknown energy name\n"
+ usage
+ exit 1
+ fi
+elif [ $# -eq 2 ]; then
+ E_BAND=$1
+ E_RANGE=$2
+else
+ printf "ERROR: no parameters given\n"
+ usage
+ exit 2
+fi
+# energy range
+PIMIN=`echo ${E_RANGE} | cut -d':' -f1`
+PIMAX=`echo ${E_RANGE} | cut -d':' -f2`
+if [ "x${PIMIN}" = "x" ] || [ "x${PIMAX}" = "x" ]; then
+ printf "ERROR: given parameters NOT match\n"
+ usage
+ exit 3
+fi
+# get 'ecf'
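+# (ECF = energy conversion factor, used by 'emldetect' below to convert
+#  source count rates into fluxes; see the note before the 'emldetect' call)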
+read -p " ECF of MOS1 for this energy band: " ECF_M1
+read -p " ECF of MOS2 for this energy band: " ECF_M2
+if [ "x${ECF_M1}" = "x" ] || [ "x${ECF_M2}" = "x" ]; then
+ printf "ERROR: ECF given error\n"
+ exit 10
+fi
+
+WD=`pwd -P`
+VARS_FILE=${WD}/variables
+if ! [ -e ${VARS_FILE} ]; then
+ printf "ERROR: NOT found '${VARS_FILE}' in the current directory\n"
+ printf " You may in wrong directory\n"
+ exit 1
+fi
+## load previous saved variables
+. ${VARS_FILE}
+printf "variables loaded from file '${VARS_FILE}'\n"
+
+## link needed files
+printf "link needed files ...\n"
+cd ${PROC}
+[ -e ${ATTHK} ] || ln -sv ../PPS/${ATTHK} .
+[ -e ${MOS1_EVT} ] || ln -sv ../PPS/${MOS1_EVT} .
+[ -e ${MOS2_EVT} ] || ln -sv ../PPS/${MOS2_EVT} .
+
+## filter the original event file
+printf "filter by energy on the original event file ...\n"
+M1_EVT="${OBS_ID}_mos1.evt"
+M2_EVT="${OBS_ID}_mos2.evt"
+evselect table=${MOS1_EVT} withfilteredset=yes \
+ filteredset=${M1_EVT} filtertype=expression \
+ expression="(PATTERN<=12) && (PI in [200:12000]) && (FLAG==0) && #XMMEA_EM" \
+ keepfilteroutput=yes updateexposure=yes filterexposure=yes
+evselect table=${MOS2_EVT} withfilteredset=yes \
+ filteredset=${M2_EVT} filtertype=expression \
+ expression="(PATTERN<=12) && (PI in [200:12000]) && (FLAG==0) && #XMMEA_EM" \
+ keepfilteroutput=yes updateexposure=yes filterexposure=yes
+
+## make images
+printf "bin event file to make image ...\n"
+M1_IMG="${OBS_ID}_mos1_${E_BAND}.img"
+M2_IMG="${OBS_ID}_mos2_${E_BAND}.img"
+evselect table=${M1_EVT} withimageset=yes imageset=${M1_IMG} \
+ imagebinning=binSize xcolumn=X ximagebinsize=22 \
+ ycolumn=Y yimagebinsize=22 filtertype=expression \
+ expression="(FLAG==0) && (PI in [${E_RANGE}])"
+evselect table=${M2_EVT} withimageset=yes imageset=${M2_IMG} \
+ imagebinning=binSize xcolumn=X ximagebinsize=22 \
+ ycolumn=Y yimagebinsize=22 filtertype=expression \
+ expression="(FLAG==0) && (PI in [${E_RANGE}])"
+
+## make exposure map
+printf "make exposure map ...\n"
+M1_EXP="${OBS_ID}_mos1_${E_BAND}_exp.img"
+M2_EXP="${OBS_ID}_mos2_${E_BAND}_exp.img"
+eexpmap attitudeset=${ATTHK} eventset=${M1_EVT} imageset=${M1_IMG} \
+ expimageset=${M1_EXP} pimin=${PIMIN} pimax=${PIMAX}
+eexpmap attitudeset=${ATTHK} eventset=${M2_EVT} imageset=${M2_IMG} \
+ expimageset=${M2_EXP} pimin=${PIMIN} pimax=${PIMAX}
+
+## make detection mask for detector
+printf "make detection mask for detector ...\n"
+M1_MASK="${OBS_ID}_mos1_${E_BAND}_mask.img"
+M2_MASK="${OBS_ID}_mos2_${E_BAND}_mask.img"
+emask expimageset=${M1_EXP} detmaskset=${M1_MASK}
+emask expimageset=${M2_EXP} detmaskset=${M2_MASK}
+
+## 'local mode' sliding box detection
+## may need to increase parameter 'imagebuffersize', say 'imagebuffersize=2000'
+printf "sliding box detection, in *local* mode ...\n"
+M1_BOXLIST_L="${OBS_ID}_mos1_${E_BAND}_boxlist_local.fits"
+M2_BOXLIST_L="${OBS_ID}_mos2_${E_BAND}_boxlist_local.fits"
+eboxdetect usemap=no likemin=8 withdetmask=yes detmasksets=${M1_MASK} \
+ imagesets=${M1_IMG} expimagesets=${M1_EXP} pimin=${PIMIN} pimax=${PIMAX} \
+ boxlistset=${M1_BOXLIST_L}
+eboxdetect usemap=no likemin=8 withdetmask=yes detmasksets=${M2_MASK} \
+ imagesets=${M2_IMG} expimagesets=${M2_EXP} pimin=${PIMIN} pimax=${PIMAX} \
+ boxlistset=${M2_BOXLIST_L}
+
+## generate spline background map from the non-source regions
+printf "generate background map from the non-source regions ...\n"
+M1_BKG="${OBS_ID}_mos1_${E_BAND}_bkg.img"
+M2_BKG="${OBS_ID}_mos2_${E_BAND}_bkg.img"
+esplinemap bkgimageset=${M1_BKG} imageset=${M1_IMG} scut=0.005 \
+ boxlistset=${M1_BOXLIST_L} nsplinenodes=16 withdetmask=yes \
+ detmaskset=${M1_MASK} withexpimage=yes expimageset=${M1_EXP}
+esplinemap bkgimageset=${M2_BKG} imageset=${M2_IMG} scut=0.005 \
+ boxlistset=${M2_BOXLIST_L} nsplinenodes=16 withdetmask=yes \
+ detmaskset=${M2_MASK} withexpimage=yes expimageset=${M2_EXP}
+
+## sliding box source detection again, in *map* mode
+## to improve the detection sensitivity
+## also NOTE the parameter 'imagebuffersize'
+printf "sliding box detection again, in *map* mode ...\n"
+M1_BOXLIST_M="${OBS_ID}_mos1_${E_BAND}_boxlist_map.fits"
+M2_BOXLIST_M="${OBS_ID}_mos2_${E_BAND}_boxlist_map.fits"
+eboxdetect usemap=yes likemin=8 withdetmask=yes detmasksets=${M1_MASK} \
+ imagesets=${M1_IMG} expimagesets=${M1_EXP} pimin=${PIMIN} \
+ pimax=${PIMAX} boxlistset=${M1_BOXLIST_M} \
+ bkgimagesets=${M1_BKG}
+eboxdetect usemap=yes likemin=8 withdetmask=yes detmasksets=${M2_MASK} \
+ imagesets=${M2_IMG} expimagesets=${M2_EXP} pimin=${PIMIN} \
+ pimax=${PIMAX} boxlistset=${M2_BOXLIST_M} \
+ bkgimagesets=${M2_BKG}
+
+## final source list
+## The energy conversion factors (ECFs) can be supplied to convert the source
+## count rates into fluxes. The ECFs for each detector and energy band depend
+## on the pattern selection and filter used during the observation. For more
+## information, please consult the calibration paper ``SSC-LUX-TN-0059'',
+## available at the XMM-Newton Science Operations Center or see Table 3.2 in
+## the '2XMM Catalogue User Guide'.
+printf "final source list ...\n"
+M1_EMLLIST="${OBS_ID}_mos1_${E_BAND}_emllist.fits"
+M2_EMLLIST="${OBS_ID}_mos2_${E_BAND}_emllist.fits"
+emldetect imagesets=${M1_IMG} expimagesets=${M1_EXP} bkgimagesets=${M1_BKG} \
+ pimin=${PIMIN} pimax=${PIMAX} boxlistset=${M1_BOXLIST_M} \
+ ecf=${ECF_M1} mlmin=10.0 mllistset=${M1_EMLLIST}
+emldetect imagesets=${M2_IMG} expimagesets=${M2_EXP} bkgimagesets=${M2_BKG} \
+ pimin=${PIMIN} pimax=${PIMAX} boxlistset=${M2_BOXLIST_M} \
+ ecf=${ECF_M2} mlmin=10.0 mllistset=${M2_EMLLIST}
+
+## optional, make a sensitivity map
+printf "optional, make a sensitivity map ...\n"
+M1_SENS="${OBS_ID}_mos1_${E_BAND}_sens.img"
+M2_SENS="${OBS_ID}_mos2_${E_BAND}_sens.img"
+esensmap expimagesets=${M1_EXP} bkgimagesets=${M1_BKG} \
+ detmasksets=${M1_MASK} sensimageset=${M1_SENS} mlmin=10.0
+esensmap expimagesets=${M2_EXP} bkgimagesets=${M2_BKG} \
+ detmasksets=${M2_MASK} sensimageset=${M2_SENS} mlmin=10.0
+
+## write source list to 'ds9 reg' file and view
+M1_SRC="${OBS_ID}_mos1_${E_BAND}_src.reg"
+M2_SRC="${OBS_ID}_mos2_${E_BAND}_src.reg"
+srcdisplay boxlistset=${M1_EMLLIST} imageset=${M1_IMG} \
+ withregionfile=yes regionfile=${M1_SRC} sourceradius=0.003
+srcdisplay boxlistset=${M2_EMLLIST} imageset=${M2_IMG} \
+ withregionfile=yes regionfile=${M2_SRC} sourceradius=0.003
+
+## remove duplicated lines in 'regionfile'
+printf "remove duplicated lines in reg files ...\n"
+mv ${M1_SRC} ${M1_SRC}_tmp
+mv ${M2_SRC} ${M2_SRC}_tmp
+cat ${M1_SRC}_tmp | uniq > ${M1_SRC}
+cat ${M2_SRC}_tmp | uniq > ${M2_SRC}
+rm ${M1_SRC}_tmp ${M2_SRC}_tmp
+
+## save variables
+printf "save variables ...\n"
+CHECK="`grep 'EXP' ${VARS_FILE}`"
+if [ "x${CHECK}" = "x" ]; then
+ printf "# `date`\n" >> ${VARS_FILE}
+ printf "M1_EVT=${M1_EVT}\n" >> ${VARS_FILE}
+ printf "M1_IMG=${M1_IMG}\n" >> ${VARS_FILE}
+ printf "M1_BKG=${M1_BKG}\n" >> ${VARS_FILE}
+ printf "M1_EXP=${M1_EXP}\n" >> ${VARS_FILE}
+ printf "M1_SENS=${M1_SENS}\n" >> ${VARS_FILE}
+ printf "M2_EVT=${M2_EVT}\n" >> ${VARS_FILE}
+ printf "M2_IMG=${M2_IMG}\n" >> ${VARS_FILE}
+ printf "M2_BKG=${M2_BKG}\n" >> ${VARS_FILE}
+ printf "M2_EXP=${M2_EXP}\n" >> ${VARS_FILE}
+ printf "M2_SENS=${M2_SENS}\n" >> ${VARS_FILE}
+ printf "\n" >> ${VARS_FILE}
+fi
+
diff --git a/astro/xmm/sky2det.sh b/astro/xmm/sky2det.sh
new file mode 100755
index 0000000..6a7ca22
--- /dev/null
+++ b/astro/xmm/sky2det.sh
@@ -0,0 +1,53 @@
+#!/bin/sh
+#
+# Convert from SKY(X,Y) coordinate to (DETX,DETY) coordinate for XMM.
+# Using the SAS tool "ecoordconv"
+#
+#
+# Weitian LI
+# Created: 2015-11-09
+# Updated: 2015-11-09
+#
+
+if [ $# -ne 3 ]; then
+ echo "usage:"
+ echo " `basename $0` <image> <sky.reg> <det.reg>"
+ exit 1
+fi
+
+IMG="$1"
+SKY_REG="$2"
+DET_REG="$3"
+[ -e "${DET_REG}" ] && mv -fv ${DET_REG} ${DET_REG}_bak
+
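+# Convert a single (X,Y) sky position with 'ecoordconv' and extract the
+# corresponding DETX/DETY values from its output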
+sky2det() {
+ _img=$1
+ _x=$2
+ _y=$3
+ _detxy=`ecoordconv imageset=${_img} x=${_x} y=${_y} coordtype=POS -w 0 -V 0 | \
+ grep -E '^\s*DETX:\s*DETY:' | awk '{ print $3, $4 }'`
+ echo "${_detxy}"
+}
+
+
+cat "${SKY_REG}" | while read line; do
+ if echo "${line}" | grep -iq '^circle'; then
+        # A circle region: convert its center to detector coordinates
+ echo "CIRCLE: ${line}"
+ CIRCLE_X=`echo "${line}" | awk -F'[(,)]' '{ print $2 }'`
+ CIRCLE_Y=`echo "${line}" | awk -F'[(,)]' '{ print $3 }'`
+ CIRCLE_R=`echo "${line}" | awk -F'[(,)]' '{ print $4 }'`
+ DETXY=`sky2det ${IMG} ${CIRCLE_X} ${CIRCLE_Y}`
+ CIRCLE_DETX=`echo "${DETXY}" | awk '{ print $1 }'`
+ CIRCLE_DETY=`echo "${DETXY}" | awk '{ print $2 }'`
+ CIRCLE_REG="circle(${CIRCLE_DETX},${CIRCLE_DETY},${CIRCLE_R})"
+ echo "${CIRCLE_REG}" >> ${DET_REG}
+ elif echo "${line}" | grep -iq '^physical'; then
+ echo "detector" >> ${DET_REG}
+ else
+ # Just move to output region file
+ echo "${line}" >> ${DET_REG}
+ fi
+done
+
+
diff --git a/astro/xmm/sky2det_manual.sh b/astro/xmm/sky2det_manual.sh
new file mode 100644
index 0000000..589a0b2
--- /dev/null
+++ b/astro/xmm/sky2det_manual.sh
@@ -0,0 +1,80 @@
+#!/bin/sh
+#
+# Convert from SKY(X,Y) coordinate to (DETX,DETY) coordinate for XMM.
+# The required conversion coefficients are extracted from input FITS header.
+#
+#
+# Weitian LI
+# Created: 2015-11-09
+# Updated: 2015-11-09
+#
+
+case "$1" in
+ -[hH]*|--[hH]*)
+ printf "usage:\n"
+ printf " `basename $0` evt=<evt> x=<sky_x> y=<sky_y>\n"
+ exit 1
+ ;;
+esac
+
+## functions {{{
+# process commandline arguments
+# cmdline arg format: `KEY=VALUE'
+getopt_keyval() {
+ until [ -z "$1" ]
+ do
+ key=${1%%=*} # extract key
+ val=${1#*=} # extract value
+ keyval="${key}=\"${val}\""
+ echo "## getopt: eval '${keyval}'"
+ eval ${keyval}
+ shift # shift, process next one
+ done
+}
+
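+# Convert a sky pixel coordinate to a WCS coordinate using the reference
+# keywords (CRPX, CRVL, CDLT) read from the event file header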
+sky2wcs() {
+ sky_val=$1
+ crpx=$2
+ crvl=$3
+ cdlt=$4
+ wcs_val=`python -c "import math; print(${crvl} + math.tan(${cdlt} * (${sky_val} - (${crpx}))))"`
+ echo ${wcs_val}
+}
+## functions }}}
+
+# process cmdline args using `getopt_keyval'
+getopt_keyval "$@"
+
+[ ! -e "${evt}" ] && echo "ERROR: ${evt} not exist" && exit 11
+
+# Get the WCS conversion coefficients
+fkeypar "${evt}" REFXCTYP
+REFXCTYP=`pget fkeypar value`
+fkeypar "${evt}" REFXCRPX
+REFXCRPX=`pget fkeypar value`
+fkeypar "${evt}" REFXCRVL
+REFXCRVL=`pget fkeypar value`
+fkeypar "${evt}" REFXCDLT
+REFXCDLT=`pget fkeypar value`
+fkeypar "${evt}" REFYCTYP
+REFYCTYP=`pget fkeypar value`
+fkeypar "${evt}" REFYCRPX
+REFYCRPX=`pget fkeypar value`
+fkeypar "${evt}" REFYCRVL
+REFYCRVL=`pget fkeypar value`
+fkeypar "${evt}" REFYCDLT
+REFYCDLT=`pget fkeypar value`
+echo "(X,Y) => (RA,DEC)"
+echo " RA = ${REFXCRVL} + TAN[ (${REFXCDLT}) * (X - (${REFXCRPX})) ]"
+echo " DEC = ${REFYCRVL} + TAN[ (${REFYCDLT}) * (Y - (${REFYCRPX})) ]"
+
+RA=`sky2wcs ${x} ${REFXCRPX} ${REFXCRVL} ${REFXCDLT}`
+DEC=`sky2wcs ${y} ${REFYCRPX} ${REFYCRVL} ${REFYCDLT}`
+echo "sky(${x},${y}) => wcs(${RA},${DEC})"
+
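+# Convert (RA,DEC) to detector coordinates with the SAS task 'esky2det'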
+DETXY=`esky2det datastyle=user ra=${RA} dec=${DEC} outunit=det calinfostyle=set calinfoset="${evt}" | \
+ grep -A 1 -E '^#\s+detX\s+detY' | tail -n 1`
+detx=`echo ${DETXY} | awk '{ print $1 }'`
+dety=`echo ${DETXY} | awk '{ print $2 }'`
+echo "(RA,DEC) => DETXY(${detx},${dety})"
+
diff --git a/astro/xmm/xspec_bkgcorr_xmm.tcl b/astro/xmm/xspec_bkgcorr_xmm.tcl
new file mode 100644
index 0000000..240d4a2
--- /dev/null
+++ b/astro/xmm/xspec_bkgcorr_xmm.tcl
@@ -0,0 +1,447 @@
+#####################################################################
+## XSPEC TCL script
+##
+## To apply background spectrum corrections to the MOS1 and MOS2 backgrounds.
+## It fakes the XB (Galactic + CXB) and NXB (SP + instrument lines)
+## spectra according to the current fitting results, and then invokes
+## "mathpha" to make the final corrected background spectrum.
+##
+## NOTE:
+## The exposure time for the faked spectra is multiplied by a scale
+## factor in order to obtain satisfactory statistics for the higher
+## energy regime. The same scale factor is also applied to the original
+## FWC background spectrum, considering that many channels have "counts"
+## smaller than one, especially in the higher energy regime. Otherwise,
+## many of these values would be rounded to *zero* by "mathpha" operating
+## in the "COUNT" mode.
+## See also the descriptions of the variable "SCALE_FACTOR" and of the
+## procedures "spectrum_rate2counts" and "scale_spectrum".
+##
+##
+## NOTES:
+## Work with XSPEC v12.x
+## Based on "xspec_bkgcorr_v2.tcl" for Chandra background spectrum correction
+##
+## Weitian LI <liweitianux@gmail.com>
+## Created: 2015-11-07
+## Updated: 2015-11-12
+##
+## ChangeLogs:
+## 2015-11-10:
+## * Remove "diag_rsp"
+## * Add proc update_header:
+## Update specified keywords instead of just copy the original header
+## * Use lists to record the information for all data groups,
+## therefore this tool can handle any number of data groups.
+## 2015-11-11:
+## * Add back "diag_rsp"
+## 2015-11-12:
+## * Update to use "ftkeypar" and "fthedit"
+## * Add procedure "scale_spectrum" to scale the FWC background
+## * Add procedure "make_filename"
+## * Add procedure "spectrum_rate2counts" to convert RATE spectrum to
+## COUNTS, therefore this script can properly handle PN spectrum
+## * Update script description
+#####################################################################
+
+
+## Global variables {{{
+set datagrp_total [ tcloutr datagrp ]
+
+# Lists to record the information of each data group
+set rootname {}
+set rmf {}
+set arf {}
+set diag_rsp {}
+set src_spec {}
+set bkg_spec {}
+set src_exposure {}
+set bkg_exposure {}
+set src_backscale {}
+set bkg_backscale {}
+set modelpars {}
+set back_modelname {}
+set back_modelcomp {}
+set back_modelpars {}
+# This "modelcomp" is not a list
+set modelcomp {}
+
+# FITS header keywords that need to be copied to the corrected spectrum
+set KEYWORDS {TELESCOP INSTRUME FILTER}
+
+# Scale factor for faking spectra
+# By lengthening the exposure time for the faking process, the total counts
+# of the faked spectra are much larger, therefore the statistical issues
+# due to small counts (especially in the higher energy regime) are mitigated.
+set SCALE_FACTOR 100
+## Global variables }}}
+
+## Procedures
+# Make new filename by appending the suffix to the stem of filename.
+# e.g., "stem.ext" + "sfix" => "stem_sfix.ext"
+proc make_filename {filename suffix} {
+ regexp -- {(.+)\.([^\.]+)} $filename match stem_ ext_
+ set newname "${stem_}_${suffix}.${ext_}"
+ return $newname
+}
+
+# Determine root name and get other filenames (e.g., rmf, arf, back)
+proc get_filenames {datagrp} {
+ global rootname
+ global src_spec
+ global bkg_spec
+ global rmf
+ global arf
+ global diag_rsp
+ set src_spec_ [ exec basename [ tcloutr filename $datagrp ] ]
+ regexp -- {(.+)_(bin|grp|group)([0-9]*)\.(pi|fits)} $src_spec_ match rootname_ grptype_ grpval_ fext_
+ set bkg_spec_ [ tcloutr backgrnd $datagrp ]
+ set response_ [ tcloutr response $datagrp ]
+ set rmf_ [ lindex $response_ 0 ]
+ set diag_rsp_ [ lindex $response_ 1 ]
+ set arf_ [ lindex [ tcloutr arf $datagrp ] 0 ]
+ lappend rootname $rootname_
+ lappend src_spec $src_spec_
+ lappend bkg_spec $bkg_spec_
+ lappend rmf $rmf_
+ lappend arf $arf_
+ lappend diag_rsp $diag_rsp_
+}
+
+# Get exposure and backscale values
+proc get_exposure_backscale {datagrp} {
+ global src_exposure
+ global bkg_exposure
+ global src_backscale
+ global bkg_backscale
+ set src_exposure_ [ tcloutr expos $datagrp s ]
+ set bkg_exposure_ [ tcloutr expos $datagrp b ]
+ scan [ tcloutr backscal $datagrp s ] "%f" src_backscale_
+ scan [ tcloutr backscal $datagrp b ] "%f" bkg_backscale_
+ lappend src_exposure $src_exposure_
+ lappend bkg_exposure $bkg_exposure_
+ lappend src_backscale $src_backscale_
+ lappend bkg_backscale $bkg_backscale_
+}
+
+# Get the model components
+proc get_modelcomp {} {
+ global datagrp_total
+ global modelcomp
+ # NOTE: "model" contains all the model components corresponding to
+    # each data group, and also contains the background model components.
+    # Therefore, the "model" content repeats the same model components
+ # for each data group.
+ # e.g.,
+ # if data group 1 has model components: "wabs*apec", and there are
+ # altogether 2 data groups, then the "model" content is:
+ # "wabs(apec) wabs(apec)"
+ # which is repeated!
+ set model [ tcloutr model ]
+ set indices [ lsearch -all $model {*:} ]
+ if { [ llength $indices ] == 0 } {
+ set modelcomp $model
+ } else {
+ set modelcomp [ lrange $model 0 [ expr {[ lindex $indices 0 ] - 1} ] ]
+ }
+    # Deal with the repeated model components if there are multiple data groups
+ set comp_length [ expr { [ llength $modelcomp ] / $datagrp_total } ]
+ set modelcomp [ lrange $modelcomp 0 [ expr {$comp_length - 1} ] ]
+}
+
+# Get the model parameters
+proc get_modelpars {datagrp} {
+ global datagrp_total
+ global modelpars
+ set modelpars_ ""
+ # NOTE: "modpar" is the total number of parameters,
+ # not just for one data group
+ set npar_ [ expr { [ tcloutr modpar ] / $datagrp_total } ]
+ set par_begin_ [ expr {$npar_ * ($datagrp - 1) + 1} ]
+ set par_end_ [ expr {$npar_ + $par_begin_ - 1} ]
+ for {set i $par_begin_} {$i <= $par_end_} {incr i} {
+ scan [ tcloutr param $i ] "%f" pval_
+ set modelpars_ "$modelpars_ $pval_ &"
+ }
+ lappend modelpars $modelpars_
+}
+
+# Get the model name, components and parameters of the background model
+# corresponding to the given data group
+proc get_backmodel {datagrp} {
+ global back_modelname
+ global back_modelcomp
+ global back_modelpars
+ set model_ [ tcloutr model ]
+ set indices_ [ lsearch -all $model_ {*:} ]
+ set name_idx_ [ lindex $indices_ [ expr {$datagrp - 1} ] ]
+ set back_modelname_ [ regsub {:\s*} [ lindex $model_ $name_idx_ ] "" ]
+ set ncomp_ [ tcloutr modcomp $back_modelname_ ]
+ set back_modelcomp_ [ lrange $model_ [ expr {$name_idx_ + 1} ] [ expr {$name_idx_ + $ncomp_ } ] ]
+ set npar_ [ tcloutr modpar $back_modelname_ ]
+ set back_modelpars_ ""
+ for {set i 1} {$i <= $npar_} {incr i} {
+ scan [ tcloutr param ${back_modelname_}:$i ] "%f" pval_
+ set back_modelpars_ "$back_modelpars_ $pval_ &"
+ }
+ lappend back_modelname $back_modelname_
+ lappend back_modelcomp $back_modelcomp_
+ lappend back_modelpars $back_modelpars_
+}
+
+# Save current XSPEC fitting results
+proc save_xspec_fit {} {
+ set now [ exec date +%Y%m%d%H%M ]
+ set xspec_outfile "xspec_${now}.xcm"
+ if { [ file exists ${xspec_outfile} ] } {
+ exec mv -fv ${xspec_outfile} ${xspec_outfile}_bak
+ }
+ save all "${xspec_outfile}"
+}
+
+# Load model
+proc load_model {comp pars} {
+ model clear
+ model $comp & $pars /*
+}
+
+# Set the norm of the ICM APEC component to be zero,
+# which is the last parameter of current loaded model.
+proc set_icm_norm {} {
+ set npar_ [ tcloutr modpar ]
+ newpar $npar_ 0.0
+ puts "ICM_apec_norm:$npar_: [ tcloutr param $npar_ ]"
+}
+
+# Fake spectrum
+proc fake_spectrum {outfile rmf arf exptime} {
+ if {[ file exists ${outfile} ]} {
+ exec mv -fv ${outfile} ${outfile}_bak
+ }
+ data none
+ fakeit none & $rmf & $arf & y & & $outfile & $exptime & /*
+}
+
+# Convert a spectrum of "RATE" to a spectrum of "COUNTS" by multiplying
+# it by the exposure time. The "STAT_ERR" column is also scaled if it exists.
+# NOTE:
+# The MOS1/2 FWC background spectrum generated by "mos_back" has column
+# "COUNTS", however, the PN FWC background spectrum generated by "pn_back"
+# has column "RATE".
+# Therefore, the PN background spectra should be converted to "COUNTS" format,
+# since the other faked CXB & NXB spectra are also in "COUNTS" format.
+# The exposure time is acquired from the "EXPOSURE" keyword in the header
+# of input spectrum.
+#
+# Return:
+# * 0 : input spectrum is already in "COUNTS" format, no conversion needed
+# * 1 : input spectrum is in "RATE" format, and is converted to "COUNTS"
+#        format and written to outfile
+# * -1 : invalid input spectrum
+proc spectrum_rate2counts {outfile infile} {
+ if {[ file exists ${outfile} ]} {
+ exec mv -fv ${outfile} ${outfile}_bak
+ }
+ # Get number of columns/fields
+ exec ftkeypar "${infile}+1" TFIELDS
+ set fields_ [ exec pget ftkeypar value ]
+ set colnames {}
+ for {set i 1} {$i <= $fields_} {incr i} {
+ exec ftkeypar "${infile}+1" "TTYPE${i}"
+ set val_ [ exec pget ftkeypar value ]
+ set colname_ [ string toupper [ string trim $val_ {' } ] ]
+ lappend colnames $colname_
+ }
+ if { [ lsearch $colnames {COUNTS} ] != -1 } {
+ # Input spectrum is already in "COUNTS" format
+ return 0
+ } elseif { [ lsearch $colnames {RATE} ] != -1 } {
+ # Get exposure time
+ exec ftkeypar "${infile}+1" EXPOSURE
+ set exposure_ [ exec pget ftkeypar value ]
+ # Convert column "RATE"
+ set tmpfile "${outfile}_tmp[pid]"
+ exec cp -fv ${infile} ${tmpfile}
+ # COUNTS = RATE * EXPOSURE
+ set fcmd_ "ftcalc \"${tmpfile}\" \"${outfile}\" column=RATE expression=\"RATE * $exposure_\" history=yes"
+ puts $fcmd_
+ exec sh -c $fcmd_
+ exec rm -fv ${tmpfile}
+ # Update column name from "RATE" to "COUNTS", and column units
+ set field_idx_ [ expr { [ lsearch $colnames {RATE} ] + 1 } ]
+ set fcmd_ "fthedit \"${outfile}+1\" keyword=\"TTYPE${field_idx_}\" operation=add value=\"COUNTS\""
+ puts $fcmd_
+ exec sh -c $fcmd_
+ set fcmd_ "fthedit \"${outfile}+1\" keyword=\"TUNIT${field_idx_}\" operation=add value=\"count\""
+ puts $fcmd_
+ exec sh -c $fcmd_
+ # Convert column "STAT_ERR"
+ if { [ lsearch $colnames {STAT_ERR} ] != -1 } {
+ exec mv -fv ${outfile} ${tmpfile}
+ # STAT_ERR = STAT_ERR * EXPOSURE
+ set fcmd_ "ftcalc \"${tmpfile}\" \"${outfile}\" column=STAT_ERR expression=\"STAT_ERR * $exposure_\" history=yes"
+ puts $fcmd_
+ exec sh -c $fcmd_
+ exec rm -fv ${tmpfile}
+ }
+ return 1
+ } else {
+ # Invalid input spectrum
+ return -1
+ }
+}
+
+# Scale the spectrum "COUNTS" and "STAT_ERR" (if exists) columns by a factor.
+# NOTE:
+# This procedure is mainly used to scale the FWC background, within which
+# many channels have "counts" less than one. When using "mathpha" to combine
+# this FWC background and other faked spectra in "COUNT" mode, the channels
+# with "counts" less than one (especially the higher energy regime) may be
+# truncated/rounded to integer, therefore the higher energy regime of the
+# combined spectrum may be greatly underestimated, and will cause the
+# following fitting very bad in the higher energy regime.
+# See also the description about the "SCALE_FACTOR" above.
+proc scale_spectrum {outfile infile factor} {
+ if {[ file exists ${outfile} ]} {
+ exec mv -fv ${outfile} ${outfile}_bak
+ }
+ exec cp -fv ${infile} ${outfile}
+ # Get number of columns/fields
+ exec ftkeypar "${outfile}+1" TFIELDS
+ set fields_ [ exec pget ftkeypar value ]
+ for {set i 1} {$i <= $fields_} {incr i} {
+ exec ftkeypar "${outfile}+1" "TTYPE${i}"
+ set val_ [ exec pget ftkeypar value ]
+ set colname_ [ string toupper [ string trim $val_ {' } ] ]
+ # Scale column "COUNTS"
+ if {$colname_ == "COUNTS"} {
+ set tmpfile "${outfile}_tmp[pid]"
+ exec mv -fv ${outfile} ${tmpfile}
+ # COUNTS = COUNTS * factor
+ set fcmd_ "ftcalc \"${tmpfile}\" \"${outfile}\" column=COUNTS expression=\"COUNTS * $factor\" history=yes"
+ puts $fcmd_
+ exec sh -c $fcmd_
+ exec rm -fv ${tmpfile}
+ }
+ # Scale column "STAT_ERR"
+ if {$colname_ == "STAT_ERR"} {
+ set tmpfile "${outfile}_tmp[pid]"
+ exec mv -fv ${outfile} ${tmpfile}
+ # STAT_ERR = STAT_ERR * factor
+ set fcmd_ "ftcalc \"${tmpfile}\" \"${outfile}\" column=STAT_ERR expression=\"STAT_ERR * $factor\" history=yes"
+ puts $fcmd_
+ exec sh -c $fcmd_
+ exec rm -fv ${tmpfile}
+ }
+ }
+}
+
+# Combine faked spectra to original FWC background spectrum with "mathpha"
+proc combine_spectra {outfile fwc_bkg cxb nxb exposure backscale} {
+ set combine_expr_ "$fwc_bkg + $cxb + $nxb"
+ set comment1_ "Corrected background spectrum; FWC based"
+ set combine_cmd_ "mathpha expr='${combine_expr_}' outfil=${outfile} exposure=${exposure} backscal=${backscale} units=C areascal=% properr=yes ncomments=1 comment1='${comment1_}'"
+ if {[ file exists ${outfile} ]} {
+ exec mv -fv ${outfile} ${outfile}_bak
+ }
+ puts $combine_cmd_
+ exec sh -c $combine_cmd_
+}
+
+# Copy header keywords from original background spectrum to the
+# newly combined background spectrum
+proc update_header {newfile origfile} {
+ global KEYWORDS
+ for {set i 0} {$i < [ llength $KEYWORDS ]} {incr i} {
+ set key_ [ lindex $KEYWORDS $i ]
+ exec ftkeypar "${origfile}+1" $key_
+ set val_ [ exec pget ftkeypar value ]
+ #set fcmd_ "fparkey value=\"$val_\" fitsfile=\"${newfile}+1\" keyword=\"$key_\" add=yes"
+ set fcmd_ "fthedit \"${newfile}+1\" keyword=\"$key_\" operation=add value=\"$val_\""
+ puts $fcmd_
+ exec sh -c $fcmd_
+ }
+}
+
+
+# Save current fit results
+save_xspec_fit
+
+# Get the current model components, which are the same for all data groups
+get_modelcomp
+
+for {set dg 1} {$dg <= $datagrp_total} {incr dg} {
+ get_filenames $dg
+ get_exposure_backscale $dg
+ get_modelpars $dg
+ get_backmodel $dg
+}
+
+puts "modelcomp: $modelcomp"
+puts "rootname: $rootname"
+puts "rmf: $rmf"
+puts "arf: $arf"
+puts "diag_rsp: $diag_rsp"
+puts "src_spec: $src_spec"
+puts "bkg_spec: $bkg_spec"
+puts "src_exposure: $src_exposure"
+puts "bkg_exposure: $bkg_exposure"
+puts "src_backscale: $src_backscale"
+puts "bkg_backscale: $bkg_backscale"
+puts "modelpars: $modelpars"
+puts "back_modelname: $back_modelname"
+puts "back_modelcomp: $back_modelcomp"
+puts "back_modelpars: $back_modelpars"
+puts "SCALE_FACTOR: $SCALE_FACTOR"
+
+# DEBUG
+set DEBUG 0
+if {$DEBUG != 1} {
+
+# Clear all loaded model data and models
+data none
+model clear
+
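+# Loop over all data groups: fake the CXB and NXB spectra from the fitted
+# models, convert/scale the original FWC background, and combine everything
+# with "mathpha" into the corrected background spectrum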
+for {set idg 0} {$idg < $datagrp_total} {incr idg} {
+ set rootname_ [ lindex $rootname $idg ]
+ set bkg_spec_ [ lindex $bkg_spec $idg ]
+ set rmf_ [ lindex $rmf $idg ]
+ set arf_ [ lindex $arf $idg ]
+ set diag_rsp_ [ lindex $diag_rsp $idg ]
+ set bkg_exposure_ [ lindex $bkg_exposure $idg ]
+ set bkg_backscale_ [ lindex $bkg_backscale $idg ]
+ set modelpars_ [ lindex $modelpars $idg ]
+ set back_modelcomp_ [ lindex $back_modelcomp $idg ]
+ set back_modelpars_ [ lindex $back_modelpars $idg ]
+ set fake_exposure [ expr {$bkg_exposure_ * $SCALE_FACTOR} ]
+ set fake_backscale [ expr {$bkg_backscale_ * $SCALE_FACTOR} ]
+ set fake_cxb "fake_cxb_${rootname_}.pi"
+ set fake_nxb "fake_nxb_${rootname_}.pi"
+ set bkgspec_cnts [ make_filename $bkg_spec_ "cnts" ]
+ set bkgcorr_outfile "bkgcorr_${rootname_}.pi"
+ # Fake CXB and NXB
+ load_model $modelcomp $modelpars_
+ set_icm_norm
+ fake_spectrum $fake_cxb $rmf_ $arf_ $fake_exposure
+ load_model $back_modelcomp_ $back_modelpars_
+ fake_spectrum $fake_nxb $diag_rsp_ "" $fake_exposure
+ # Convert FWC background spectrum from "RATE" to "COUNTS" for PN
+ set ret [ spectrum_rate2counts $bkgspec_cnts $bkg_spec_ ]
+ if { $ret == 0 } {
+ set bkgspec_cnts $bkg_spec_
+ } elseif { $ret == 1 } {
+ puts "Converted RATE spectrum to COUNTS: $bkgspec_cnts"
+ } else {
+ return -code error "ERROR: invalid spectrum for 'spectrum_rate2counts'"
+ }
+ set bkgspec_scaled [ make_filename $bkgspec_cnts "sf${SCALE_FACTOR}" ]
+ # Scale original FWC background before combination
+ scale_spectrum $bkgspec_scaled $bkgspec_cnts $SCALE_FACTOR
+ # Combine faked spectra with original FWC background
+ combine_spectra $bkgcorr_outfile $bkgspec_scaled $fake_cxb $fake_nxb $bkg_exposure_ $fake_backscale
+ update_header $bkgcorr_outfile $bkg_spec_
+}
+
+}
+
+# vim: set ts=8 sw=4 tw=0 fenc=utf-8 ft=tcl: #