summaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
authorAaron LI <aaronly.me@outlook.com>2016-04-26 16:59:05 +0800
committerAaron LI <aaronly.me@outlook.com>2016-04-26 16:59:05 +0800
commit682e5dd48ac854fc8a1cc49fd572663284903cf4 (patch)
tree86b57216756f18b416cf63756e673f8ddc563960
downloadcexcess-682e5dd48ac854fc8a1cc49fd572663284903cf4.tar.bz2
Use git to manage the tools of excess_sample
-rw-r--r--.gitignore12
-rwxr-xr-xanalyze_path.sh63
-rwxr-xr-xbuild_sample.sh128
-rwxr-xr-xclean_imgdir.sh28
-rwxr-xr-xclean_repro_zzh.sh35
-rwxr-xr-xcollect_results.sh85
-rwxr-xr-xcompress_fits.sh50
-rwxr-xr-xds9_image.sh102
-rwxr-xr-xextract_info.py165
-rwxr-xr-xmake_json_info_zzh.py54
-rwxr-xr-xmake_r500_regions.py90
-rwxr-xr-xmake_stowbkg_image.sh289
-rwxr-xr-xprepare_sbpfit.py98
-rwxr-xr-xprepare_sbpfit_dir.sh67
-rwxr-xr-xsbp_fit.py803
-rwxr-xr-xupdate_r4_header.sh56
16 files changed, 2125 insertions, 0 deletions
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..085391c
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,12 @@
+# .gitignore
+
+*~
+*.swp
+*.un~
+*_bak
+
+# python
+__init__.py
+*.pyc
+**/__pycache__
+
diff --git a/analyze_path.sh b/analyze_path.sh
new file mode 100755
index 0000000..e39ffc0
--- /dev/null
+++ b/analyze_path.sh
@@ -0,0 +1,63 @@
+#!/bin/sh
+
+analyze_path() {
+ # extract `obs id' and `source name' from path
+ echo "$@" | awk '
+ # main part
+ {
+ if (NF==1) {
+ ## oi & name
+ input=($1 "/")
+ if (input ~ /_oi/) {
+ ## PATTERN: .../$name_oi$oi/...
+ idx_oi = match(input, /oi[0-9]+/) + 2; # "2" skip the "oi"
+ len_oi = RLENGTH - 2;
+ oi = substr(input, idx_oi, len_oi);
+ idx_name = match(input, /\/[a-zA-Z0-9.+-]+_oi/) + 1;
+ len_name = RLENGTH - 4;
+ name = substr(input, idx_name, len_name);
+ owner = "lwt";
+ }
+ else {
+ ## PATTERN: .../$name/$oi/...
+ idx_oi = match(input, /\/[0-9]+\//) + 1;
+ len_oi = RLENGTH - 2;
+ oi = substr(input, idx_oi, len_oi);
+ idx_name1 = match(input, /\/[a-zA-Z0-9_.+-]+\/[0-9]+\//);
+ len_name1 = RLENGTH;
+ name1 = substr(input, idx_name1, len_name1);
+ idx_name = match(name1, /\/[a-zA-Z0-9_.+-]+\//) + 1;
+ len_name = RLENGTH - 2;
+ name = substr(name1, idx_name, len_name);
+ owner = "zzh";
+ }
+ ## output
+ printf("input: %s\n", input)
+ printf("oi: %s\nname: %s\nowner: %s\n", oi, name, owner)
+ }
+ else {
+ printf("*** WARNING: invalid input: %s\n", $0)
+ }
+ }
+ # END { }
+ '
+}
+
+
+case "$1" in
+ -[hH]*)
+ echo "Usage:"
+ echo " `basename $0` <path1> ..."
+ exit 1
+ ;;
+esac
+
+echo "NAME,OI,PATH"
+while [ ! -z "$1" ]; do
+ path="$1"
+ shift
+ name=`analyze_path "${path}" | grep '^name:' | awk '{ print $2 }'`
+ oi=`analyze_path "${path}" | grep '^oi:' | awk '{ print $2 }'`
+ echo "${name},${oi},${path}"
+done
+
diff --git a/build_sample.sh b/build_sample.sh
new file mode 100755
index 0000000..08fb76c
--- /dev/null
+++ b/build_sample.sh
@@ -0,0 +1,128 @@
+#!/bin/sh
+#
+# Build and/or update the sample of studying central X-ray brightness
+# excess, by only copying/updating the required products from our complete
+# mass sample.
+#
+# Aaron LI
+# Created: 2016-03-02
+# Updated: 2016-04-15
+#
+# ChangeLog:
+# 2016-04-15:
+# * Add support for zzh's sample
+# * Convert "_" to "-" in NAME
+#
+
+analyze_path() {
+ # extract `obs_id' and `source name' from path
+ #
+ # Weitian LI <liweitianux@gmail.com>
+ # 2013/02/04
+ #
+ # input:
+ # path that include `oi' and `source name'
+ # e.g.:
+ #
+ # output:
+ #
+
+ echo "$@" | awk '
+ # main part
+ {
+ if (NF==1) {
+ ## oi & name
+ input=($1 "/")
+ if (input ~ /_oi/) {
+ ## PATTERN: .../$name_oi$oi/...
+ idx_oi = match(input, /oi[0-9]+/) + 2; # "2" skip the "oi"
+ len_oi = RLENGTH - 2;
+ oi = substr(input, idx_oi, len_oi);
+ idx_name = match(input, /\/[a-zA-Z0-9.+-]+_oi/) + 1;
+ len_name = RLENGTH - 4;
+ name = substr(input, idx_name, len_name);
+ owner = "lwt";
+ }
+ else {
+ ## PATTERN: .../$name/$oi/...
+ idx_oi = match(input, /\/[0-9]+\//) + 1;
+ len_oi = RLENGTH - 2;
+ oi = substr(input, idx_oi, len_oi);
+ idx_name1 = match(input, /\/[a-zA-Z0-9_.+-]+\/[0-9]+\//);
+ len_name1 = RLENGTH;
+ name1 = substr(input, idx_name1, len_name1);
+ idx_name = match(name1, /\/[a-zA-Z0-9_.+-]+\//) + 1;
+ len_name = RLENGTH - 2;
+ name = substr(name1, idx_name, len_name);
+ owner = "zzh";
+ }
+ ## output
+ printf("input: %s\n", input)
+ printf("oi: %s\nname: %s\nowner: %s\n", oi, name, owner)
+ }
+ else {
+ printf("*** WARNING: invalid input: %s\n", $0)
+ }
+ }
+ # END { }
+ '
+}
+
+
+if [ $# -lt 2 ]; then
+ echo "Usage:"
+ echo " `basename $0` <dest_root_dir> <repro_dir_list>"
+ echo " `basename $0` <dest_root_dir> <repro_dir1> ..."
+ exit 1
+fi
+
+DEST_ROOT="$1"
+[ ! -d "${DEST_ROOT}" ] && mkdir -v "${DEST_ROOT}"
+
+# repro dir's
+if [ -f "$2" ]; then
+ REPRO_LIST="$2"
+elif [ -d "$2" ]; then
+ REPRO_LIST="_tmp_repro_$$.list"
+ [ -f "${REPRO_LIST}" ] && mv -f "${REPRO_LIST}" "${REPRO_LIST}_bak"
+ while [ ! -z "$2" ]; do
+ echo "$2" >> "${REPRO_LIST}"
+ shift
+ done
+else
+ echo "ERROR: invalid arguments: $2"
+ exit 2
+fi
+
+INIT_DIR=`pwd -P`
+cat "${REPRO_LIST}" | while read -r repro_dir; do
+    OI=`analyze_path "${repro_dir}" | grep '^oi:' | awk '{ print $2 }'`
+    NAME=`analyze_path "${repro_dir}" | grep '^name:' | awk '{ print $2 }'`
+    OWNER=`analyze_path "${repro_dir}" | grep '^owner:' | awk '{ print $2 }'`
+    # avoid "_" in name
+    NAME=`echo "${NAME}" | tr '_' '-'`
+    echo "********* ${NAME}_oi${OI} *********"
+    # create directories
+    DEST="${DEST_ROOT}/${NAME}_oi${OI}"
+    #[ ! -d "${DEST}/repro" ] && mkdir -pv ${DEST}/repro
+    [ ! -d "${DEST}/repro" ] && continue    # Only update sample
+    # FIX: guard the cd -- if it failed, the cp commands below would run
+    # inside the wrong directory
+    cd "${DEST}/repro" || continue
+    # simply copy the whole sub-directories (quote the dir, keep the globs)
+    cp -av "${repro_dir}"/acisf*.fits .
+    cp -av "${repro_dir}"/acisf*.lis "${repro_dir}"/pcadf*.fits .
+    cp -av "${repro_dir}"/evt "${repro_dir}"/bkg "${repro_dir}"/img .
+    cp -av "${repro_dir}"/*_INFO.json .
+    if [ "${OWNER}" = "zzh" ]; then
+        cp -av "${repro_dir}"/spc/profile/rspec.reg ./img/
+        cp -av "${repro_dir}"/../evt2/spc/profile/rspec.reg ./img/rspec2.reg
+        cp -av "${repro_dir}"/../evt2/img/sbprofile.reg ./img/sbprofile2.reg
+        cd ./img
+        [ ! -f "rspec.reg" ] && ln -sv rspec2.reg rspec.reg
+        [ ! -f "sbprofile.reg" ] && ln -sv sbprofile2.reg sbprofile.reg
+    fi
+    # apply clean up
+    find . \( -iname '*_bak' -o -iname '_tmp*' \) -delete
+    find . -type l -iname '*.dat' -delete
+    cd "${INIT_DIR}"
+done
+
diff --git a/clean_imgdir.sh b/clean_imgdir.sh
new file mode 100755
index 0000000..8cc51ed
--- /dev/null
+++ b/clean_imgdir.sh
@@ -0,0 +1,28 @@
+#!/bin/sh
+#
+# Clean the 'img' directories for the excess_sample.
+#
+#
+# Aaron LI
+# 2016-04-11
+#
+
+case "$1" in
+ ""|-[hH]*)
+ echo "Usage: `basename $0` <imgdir1> ..."
+ exit 1
+esac
+
+INIT_DIR=`pwd -P`
+while [ ! -z "$1" ]; do
+    imgdir="$1"
+    shift
+    cd "${INIT_DIR}"
+    # FIX: guard the cd -- if it failed, the rm commands below would run
+    # inside the wrong directory (INIT_DIR) and delete unrelated files
+    cd "${imgdir}" || { echo "ERROR: cannot cd to '${imgdir}'" >&2; continue; }
+    echo "*** Cleaning '${imgdir}' ..."
+    rm -fv _* *~ .?*~
+    rm -fv celld*.reg csb_results*.txt rspec*bak
+    rm -fv img_*.fits *fov*.fits ds9.jpg
+    rm -fv sbp*.log expcorr*.log
+done
+
diff --git a/clean_repro_zzh.sh b/clean_repro_zzh.sh
new file mode 100755
index 0000000..1478f9c
--- /dev/null
+++ b/clean_repro_zzh.sh
@@ -0,0 +1,35 @@
+#!/bin/sh
+#
+# Clean the 'repro' and its subdirectories for the sources
+# drawn from zzh among the excess_sample.
+#
+#
+# Aaron LI
+# 2016-04-12
+#
+
+case "$1" in
+ ""|-[hH]*)
+ echo "Usage: `basename $0` <repro_dir1> ..."
+ exit 1
+esac
+
+INIT_DIR=`pwd -P`
+while [ ! -z "$1" ]; do
+    repro_dir="$1"
+    shift
+    cd "${INIT_DIR}"
+    # FIX: guard every cd -- a failed cd would otherwise run the rm
+    # commands below inside the wrong directory and delete unrelated files
+    cd "${repro_dir}" || { echo "ERROR: cannot cd to '${repro_dir}'" >&2; continue; }
+    echo "*** Cleaning '${repro_dir}' ..."
+    cd evt || continue
+    rm -fv *~ .?*~ _* *.log
+    rm -fv evt2*_orig.fits evt2*_rmsrcs.fits
+    rm -fv img_*.fits sbprofile.reg *fov*.fits
+    mv -fv peak*.reg ../img/
+    cd ../bkg || continue
+    rm -fv *~ .?*~ _* *.log
+    cd ../img || continue
+    rm -fv *~ .?*~ _* *.log
+    rm -fv rspec*bak *fov*.fits img_*.fits
+done
+
diff --git a/collect_results.sh b/collect_results.sh
new file mode 100755
index 0000000..d62d291
--- /dev/null
+++ b/collect_results.sh
@@ -0,0 +1,85 @@
+#!/bin/sh
+#
+# Collect the fitting results and organize in CSV format.
+#
+# Aaron LI
+# Created: 2016-03-29
+#
+
+if [ $# -ne 1 ]; then
+ echo "Usage:"
+ echo " `basename $0` <sbpfit_dir_list>"
+fi
+
+SBPFIT_CONF="sbpfit_sbeta.conf"
+SBPFIT_DBETA_CONF="sbpfit_dbeta.conf"
+CUR=`pwd -P`
+
+# header
+printf "Name,ObsID,npoints,"
+printf "sbeta_ignore,sbeta_npoints,sbeta_chisq,sbeta_redchisq,s0,s0_E68L,s0_E68U,rc,rc_E68L,rc_E68U,beta,beta_E68L,beta_E68U,bkg,bkg_E68L,bkg_E68U,"
+printf "dbeta_ignore,dbeta_npoints,dbeta_chisq,dbeta_redchisq,s01,s01_E68L,s01_E68U,rc1,rc1_E68L,rc1_E68U,beta1,beta1_E68L,beta1_E68U,s02,s02_E68L,s02_E68U,rc2,rc2_E68L,rc2_E68U,beta2,beta2_E68L,beta2_E68U,dbeta_bkg,dbeta_bkg_E68L,dbeta_bkg_E68U\n"
+
+cat $1 | while read sbpfit_dir; do
+ cd ${CUR}
+ cd ${sbpfit_dir}
+ NAME=`grep '^name' ${SBPFIT_CONF} | awk -F'=' '{ print $2 }' | xargs`
+ OBSID=`grep '^obsid' ${SBPFIT_CONF} | awk -F'=' '{ print $2 }' | xargs`
+ SBP_FILE=`grep '^sbpfile' ${SBPFIT_CONF} | awk -F'=' '{ print $2 }' | xargs`
+ RES_FILE=`grep '^outfile' ${SBPFIT_CONF} | awk -F'=' '{ print $2 }' | xargs`
+ DBETA_RES_FILE=`grep '^outfile' ${SBPFIT_DBETA_CONF} | awk -F'=' '{ print $2 }' | xargs`
+ npoints=`grep -vE '^\s*#' ${SBP_FILE} | wc -l`
+ ## single-beta model fitting results
+ ignore=`grep '^ignore' ${SBPFIT_CONF} | awk -F'=' '{ print $2 }' | xargs | tr ',' ';'`
+ npoints_fit=`grep -E '^\s+# data points' ${RES_FILE} | awk -F'=' '{ print $2 }' | xargs`
+ chisq=`grep -E '^\s+chi-square' ${RES_FILE} | awk -F'=' '{ print $2 }' | xargs`
+ redchisq=`grep -E '^\s+reduced chi-square' ${RES_FILE} | awk -F'=' '{ print $2 }' | xargs`
+ s0=`grep '^s0:' ${RES_FILE} | awk '{ print $4 }'`
+ rc=`grep '^rc:' ${RES_FILE} | awk '{ print $4 }'`
+ beta=`grep '^beta:' ${RES_FILE} | awk '{ print $4 }'`
+ bkg=`grep '^bkg:' ${RES_FILE} | awk '{ print $4 }'`
+ # 68% confidence interval
+ s0_E68L=`grep '^s0:' ${RES_FILE} | awk '{ print $3 }'`
+ s0_E68U=`grep '^s0:' ${RES_FILE} | awk '{ print $5 }'`
+ rc_E68L=`grep '^rc:' ${RES_FILE} | awk '{ print $3 }'`
+ rc_E68U=`grep '^rc:' ${RES_FILE} | awk '{ print $5 }'`
+ beta_E68L=`grep '^beta:' ${RES_FILE} | awk '{ print $3 }'`
+ beta_E68U=`grep '^beta:' ${RES_FILE} | awk '{ print $5 }'`
+ bkg_E68L=`grep '^bkg:' ${RES_FILE} | awk '{ print $3 }'`
+ bkg_E68U=`grep '^bkg:' ${RES_FILE} | awk '{ print $5 }'`
+ printf "${NAME},${OBSID},${npoints},"
+ printf "${ignore},${npoints_fit},${chisq},${redchisq},${s0},${s0_E68L},${s0_E68U},${rc},${rc_E68L},${rc_E68U},${beta},${beta_E68L},${beta_E68U},${bkg},${bkg_E68L},${bkg_E68U},"
+ if [ -r "${DBETA_RES_FILE}" ]; then
+ ## double-beta model fitting results
+ dbeta_ignore=`grep '^ignore' ${SBPFIT_DBETA_CONF} | awk -F'=' '{ print $2 }' | xargs | tr ',' ';'`
+ dbeta_npoints_fit=`grep -E '^\s+# data points' ${RES_FILE} | awk -F'=' '{ print $2 }' | xargs`
+ dbeta_chisq=`grep -E '^\s+chi-square' ${DBETA_RES_FILE} | awk -F'=' '{ print $2 }' | xargs`
+ dbeta_redchisq=`grep -E '^\s+reduced chi-square' ${DBETA_RES_FILE} | awk -F'=' '{ print $2 }' | xargs`
+ s01=`grep '^s01:' ${DBETA_RES_FILE} | awk '{ print $4 }'`
+ rc1=`grep '^rc1:' ${DBETA_RES_FILE} | awk '{ print $4 }'`
+ beta1=`grep '^beta1:' ${DBETA_RES_FILE} | awk '{ print $4 }'`
+ s02=`grep '^s02:' ${DBETA_RES_FILE} | awk '{ print $4 }'`
+ rc2=`grep '^rc2:' ${DBETA_RES_FILE} | awk '{ print $4 }'`
+ beta2=`grep '^beta2:' ${DBETA_RES_FILE} | awk '{ print $4 }'`
+ dbeta_bkg=`grep '^bkg:' ${DBETA_RES_FILE} | awk '{ print $4 }'`
+ # 68% confidence interval
+ s01_E68L=`grep '^s01:' ${DBETA_RES_FILE} | awk '{ print $3 }'`
+ s01_E68U=`grep '^s01:' ${DBETA_RES_FILE} | awk '{ print $5 }'`
+ rc1_E68L=`grep '^rc1:' ${DBETA_RES_FILE} | awk '{ print $3 }'`
+ rc1_E68U=`grep '^rc1:' ${DBETA_RES_FILE} | awk '{ print $5 }'`
+ beta1_E68L=`grep '^beta1:' ${DBETA_RES_FILE} | awk '{ print $3 }'`
+ beta1_E68U=`grep '^beta1:' ${DBETA_RES_FILE} | awk '{ print $5 }'`
+ s02_E68L=`grep '^s02:' ${DBETA_RES_FILE} | awk '{ print $3 }'`
+ s02_E68U=`grep '^s02:' ${DBETA_RES_FILE} | awk '{ print $5 }'`
+ rc2_E68L=`grep '^rc2:' ${DBETA_RES_FILE} | awk '{ print $3 }'`
+ rc2_E68U=`grep '^rc2:' ${DBETA_RES_FILE} | awk '{ print $5 }'`
+ beta2_E68L=`grep '^beta2:' ${DBETA_RES_FILE} | awk '{ print $3 }'`
+ beta2_E68U=`grep '^beta2:' ${DBETA_RES_FILE} | awk '{ print $5 }'`
+ dbeta_bkg_E68L=`grep '^bkg:' ${DBETA_RES_FILE} | awk '{ print $3 }'`
+ dbeta_bkg_E68U=`grep '^bkg:' ${DBETA_RES_FILE} | awk '{ print $5 }'`
+ printf "${dbeta_ignore},${dbeta_npoints_fit},${dbeta_chisq},${dbeta_redchisq},${s01},${s01_E68L},${s01_E68U},${rc1},${rc1_E68L},${rc1_E68U},${beta1},${beta1_E68L},${beta1_E68U},${s02},${s02_E68L},${s02_E68U},${rc2},${rc2_E68L},${rc2_E68U},${beta2},${beta2_E68L},${beta2_E68U},${dbeta_bkg},${dbeta_bkg_E68L},${dbeta_bkg_E68U}\n"
+ else
+ printf ",,,,,,,,,,,,,,,,,,,,,,\n"
+ fi
+done
+
diff --git a/compress_fits.sh b/compress_fits.sh
new file mode 100755
index 0000000..dbce197
--- /dev/null
+++ b/compress_fits.sh
@@ -0,0 +1,50 @@
+#!/bin/sh
+#
+# Compress the FITS files that are not used directly.
+#
+# Aaron LI
+# 2016-04-16
+#
+
+# whether to compress all big files (useful for dropped sources)
+FLAG_ALL="NO"
+
+# compress command
+COMPRESS="xz -v"
+#COMPRESS="ls -lh" # test
+
+
+case "$1" in
+ -[hH]*)
+ echo "Usage:"
+ echo " `basename $0` [ -a ] <source_dir1> ..."
+ exit 1
+ ;;
+ -[aA]*)
+ FLAG_ALL="YES"
+ shift
+ ;;
+esac
+
+
+while [ ! -z "$1" ]; do
+ source="$1"
+ shift
+ echo "====== ${source} ======"
+ find ${source}/ -type f \
+ \( -name 'acis*_repro_evt2.fits' -o \
+ -name 'pcadf*_asol*.fits' -o \
+ -name 'blanksky_*.fits' -o \
+ -name '*_tdet.fits' -o \
+ -name 'imgcorr_*.fits' \) \
+ -exec ${COMPRESS} '{}' \;
+ if [ "${FLAG_ALL}" = "YES" ]; then
+ echo "*** ALL ***"
+ find ${source}/ -type f \
+ \( -name 'evt2_*.fits' -o \
+ -name 'expmap_*.fits' -o \
+ -name 'img_*.fits' \) \
+ -exec ${COMPRESS} '{}' \;
+ fi
+done
+
diff --git a/ds9_image.sh b/ds9_image.sh
new file mode 100755
index 0000000..ab01631
--- /dev/null
+++ b/ds9_image.sh
@@ -0,0 +1,102 @@
+#!/bin/sh
+#
+# Use DS9 to visualize the ACIS image, with useful/handy
+# arguments/options passed.
+# Also touch the output image filename for easier save.
+#
+# Aaron LI
+# 2016-04-16
+#
+
+# Default parameters
+FILE_PATTERN="img_c*_fill.fits"
+REG_FILE="r500.reg"
+REG_FORMAT="ds9"
+REG_FOV="skyfov.fits"
+
+
+case "$1" in
+ -[hH]*)
+ echo "Usage:"
+ echo " `basename $0` <img_dir1> ..."
+ exit 1
+ ;;
+esac
+
+
+analyze_path() {
+ # extract `obs id' and `source name' from path
+ echo "$@" | awk '
+ # main part
+ {
+ if (NF==1) {
+ ## oi & name
+ input=($1 "/")
+ if (input ~ /_oi/) {
+ ## PATTERN: .../$name_oi$oi/...
+ idx_oi = match(input, /oi[0-9]+/) + 2; # "2" skip the "oi"
+ len_oi = RLENGTH - 2;
+ oi = substr(input, idx_oi, len_oi);
+ idx_name = match(input, /\/[a-zA-Z0-9.+-]+_oi/) + 1;
+ len_name = RLENGTH - 4;
+ name = substr(input, idx_name, len_name);
+ owner = "lwt";
+ }
+ else {
+ ## PATTERN: .../$name/$oi/...
+ idx_oi = match(input, /\/[0-9]+\//) + 1;
+ len_oi = RLENGTH - 2;
+ oi = substr(input, idx_oi, len_oi);
+ idx_name1 = match(input, /\/[a-zA-Z0-9_.+-]+\/[0-9]+\//);
+ len_name1 = RLENGTH;
+ name1 = substr(input, idx_name1, len_name1);
+ idx_name = match(name1, /\/[a-zA-Z0-9_.+-]+\//) + 1;
+ len_name = RLENGTH - 2;
+ name = substr(name1, idx_name, len_name);
+ owner = "zzh";
+ }
+ ## output
+ printf("input: %s\n", input)
+ printf("oi: %s\nname: %s\nowner: %s\n", oi, name, owner)
+ }
+ else {
+ printf("*** WARNING: invalid input: %s\n", $0)
+ }
+ }
+ # END { }
+ '
+}
+
+
+INIT_DIR=`pwd -P`
+while [ ! -z "$1" ]; do
+    imgdir="$1"
+    shift
+    # FIX: always start from INIT_DIR -- the `continue' below used to skip
+    # the trailing `cd ${INIT_DIR}', so a later relative <img_dir> argument
+    # would be resolved against the previous directory
+    cd "${INIT_DIR}"
+    cd "${imgdir}" || { echo "ERROR: cannot cd to '${imgdir}'" >&2; continue; }
+    echo "====== ${PWD} ======"
+    NAME=`analyze_path "${PWD}" | grep '^name:' | awk '{ print $2 }'`
+    FILE=`\ls ${FILE_PATTERN}`
+    [ -z "${FILE}" ] && continue
+    echo "FITS file: ${FILE}"
+    #
+    PNG_SUFFIX=`echo "${FILE%.fits}" | sed -e 's/_c7//' -e 's/_c0-3//'`
+    PNG_FILE="${NAME}_${PNG_SUFFIX}.png"
+    [ ! -f "${PNG_FILE}" ] && touch ${PNG_FILE}
+    echo "PNG file: ${PNG_FILE}"
+    # FoV region
+    # FIX: reset each iteration; a stale value from the previous directory
+    # was reused whenever the current one lacks ${REG_FOV}
+    DS9_REG_FOV=""
+    if [ -f "${REG_FOV}" ]; then
+        DS9_REG_FOV="-regions color green -regions ${REG_FOV}"
+    fi
+    #
+    ds9 ${FILE} \
+        -bin factor 1 \
+        -smooth radius 4 -smooth yes \
+        -scale asinh \
+        -cmap sls \
+        -geometry 1288x1026-0+0 \
+        -regions format ${REG_FORMAT} \
+        -regions color white -regions ${REG_FILE} ${DS9_REG_FOV}
+    #
+    cd "${INIT_DIR}"
+done
+
diff --git a/extract_info.py b/extract_info.py
new file mode 100755
index 0000000..af127b4
--- /dev/null
+++ b/extract_info.py
@@ -0,0 +1,165 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+#
+# Extract R500 from the '*_INFO.json' file, and center coordinate from
+# existing "sbprofile.reg", and then make the circle regions w.r.t R500
+# in order to visualize the FoV coverage of the observations.
+#
+# Aaron LI
+# 2016-04-22
+#
+# ChangeLog:
+#
+
+import sys
+import glob
+import os
+import re
+import json
+import csv
+import argparse
+from collections import OrderedDict
+
+
+def extract_info(info):
+ """
+ Extract the requested information (i.e., keywords) from the info
+ dictionary.
+ """
+ # all the keys whose info to be extracted (and keep the order)
+ keys = OrderedDict([
+ ("Name", "Source Name"),
+ ("ObsID", ("Obs. ID", int)),
+ ("Detector", "Detector"),
+ ("Exposure_ks", ("Exposure (ks)", float)),
+ ("Exposure_clean_ks", ("Clean Exposure (ks)", float)),
+ ("redshift", ("redshift", float)),
+ ("nH", None),
+ ("Rmax_SBP_pix", ("Rmax_SBP (pixel)", float)),
+ ("Rmax_SBP_kpc", ("Rmax_SBP (kpc)", float)),
+ ("kpc_per_pix", None),
+ ("R500_kpc", None),
+ ("R500_pix", None),
+ ("R500EL_kpc", None),
+ ("R500EU_kpc", None),
+ ("M500", None),
+ ("M500EL", None),
+ ("M500EU", None),
+ ("L500", None),
+ ("L500E", None),
+ ("R200_kpc", None),
+ ("R200_pix", None),
+ ("R200EL_kpc", None),
+ ("R200EU_kpc", None),
+ ("M200", None),
+ ("M200EL", None),
+ ("M200EU", None),
+ ("L200", None),
+ ("L200E", None),
+ ("Tavg_0.2-0.5R500", ("T(0.2-0.5 R500)", float)),
+ ("TavgEL_0.2-0.5R500", ("T_err_l(0.2-0.5 R500)", float)),
+ ("TavgEU_0.2-0.5R500", ("T_err_u(0.2-0.5 R500)", float)),
+ ])
+ # keys corresponding to lwt's INFO
+ keys_lwt = {
+ "nH": ("nH (10^22 cm^-2)", float),
+ "R500_kpc": ("R500 (kpc)", float),
+ "R500EL_kpc": ("R500_err_lower (1sigma)", float),
+ "R500EU_kpc": ("R500_err_upper (1sigma)", float),
+ "M500": ("M500 (M_sun)", float),
+ "M500EL": ("M500_err_lower (1sigma)", float),
+ "M500EU": ("M500_err_upper (1sigma)", float),
+ "L500": ("L500 (erg/s)", float),
+ "L500E": ("L500_err (1sigma)", float),
+ "R200_kpc": ("R200 (kpc)", float),
+ "R200EL_kpc": ("R200_err_lower (1sigma)", float),
+ "R200EU_kpc": ("R200_err_upper (1sigma)", float),
+ "M200": ("M200 (M_sun)", float),
+ "M200EL": ("M200_err_lower (1sigma)", float),
+ "M200EU": ("M200_err_upper (1sigma)", float),
+ "L200": ("L200 (erg/s)", float),
+ "L200E": ("L200_err (1sigma)", float),
+ }
+ # keys corresponding to zzh's INFO
+ keys_zzh = {
+ "nH": ("nh", float),
+ "R500_kpc": ("R500", float),
+ "R500EL_kpc": ("R500_err_lower", float),
+ "R500EU_kpc": ("R500_err_upper", float),
+ "M500": ("M500 (solar mass)", float),
+ "M500EL": ("M500_err_lower (1 sigma)", float),
+ "M500EU": ("M500_err_upper (1 sigma)", float),
+ "L500": ("L500 (erg/s)", float),
+ "L500E": ("L500_err (1 sigma)", float),
+ "R200_kpc": ("R200", float),
+ "R200EL_kpc": ("R200_err_lower", float),
+ "R200EU_kpc": ("R200_err_upper", float),
+ "M200": ("M200", float),
+ "M200EL": ("M200_err_lower", float),
+ "M200EU": ("M200_err_upper", float),
+ "L200": ("L200", float),
+ "L200E": ("L200_err", float),
+ }
+
+ if "IN_SAMPLE" in info.keys():
+ # zzh's INFO
+ keys.update(keys_zzh)
+ else:
+ # lwt's INFO
+ keys.update(keys_lwt)
+
+ data = keys.copy()
+ for k, v in keys.items():
+ if v is None:
+ continue
+ elif isinstance(v, tuple):
+ t = v[1]
+ try:
+ data[k] = t(info[ v[0] ])
+ except (ValueError, TypeError):
+ data[k] = None
+ else:
+ data[k] = info[v]
+
+ data["kpc_per_pix"] = data["Rmax_SBP_kpc"] / data["Rmax_SBP_pix"]
+ data["R500_pix"] = data["R500_kpc"] / data["kpc_per_pix"]
+ data["R200_pix"] = data["R200_kpc"] / data["kpc_per_pix"]
+
+ return data
+
+
+def main():
+ parser = argparse.ArgumentParser(description="Extract data from INFO.json")
+ parser.add_argument("-j", "--json", dest="json", required=False,
+ help="the *_INFO.json file (default: find *_INFO.json)")
+ parser.add_argument("path", nargs="?", default=".",
+ help="path to the directory contains the INFO.json")
+ args = parser.parse_args()
+
+ # path to the directory contains INFO.json
+ if args.path == ".":
+ path = os.getcwd()
+ else:
+ path = args.path
+ os.chdir(path)
+
+ # default "*_INFO.json"
+ info_json = glob.glob("*_INFO.json")[0]
+ if args.json:
+ info_json = args.json
+ json_str = open(info_json).read().rstrip().rstrip(",")
+ info = json.loads(json_str)
+
+ data = extract_info(info)
+ data["PATH"] = path
+ #print(data, file=sys.stderr)
+
+ # output results
+ csv_writer = csv.writer(sys.stdout)
+ csv_writer.writerow(data.keys())
+ csv_writer.writerow(data.values())
+
+
+if __name__ == "__main__":
+ main()
+
diff --git a/make_json_info_zzh.py b/make_json_info_zzh.py
new file mode 100755
index 0000000..f4e4ad5
--- /dev/null
+++ b/make_json_info_zzh.py
@@ -0,0 +1,54 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+#
+# Extract the source information and save as a JSON file for each
+# source of ZZH from the CSV collection file.
+#
+# Aaron LI
+# 2016-04-14
+#
+
+import os
+import re
+import sys
+import csv
+import json
+import functools
+from collections import OrderedDict
+
+argc = len(sys.argv)
+if argc < 2:
+ print("usage:")
+ print(" %s <input_csv> [ <output_json> <obs_id> ]" % \
+ os.path.basename(sys.argv[0]))
+ sys.exit(1)
+
+# extract default source name and obsid from the output path
+cwd = os.getcwd()
+m = re.match(r".*zzh/(?P<name>[^_]+)_oi(?P<obsid>\d+)/repro.*$", cwd)
+name = m.group("name")
+obsid = m.group("obsid")
+json_fn = name + "_INFO.json"
+
+csv_fn = sys.argv[1]
+if argc >= 3:
+ json_fn = sys.argv[2]
+if argc == 4:
+ obsid = int(sys.argv[3])
+
+with open(csv_fn, "r") as csvfile:
+ csv_reader = csv.reader(csvfile)
+ csv_data = list(csv_reader)
+
+csv_header = csv_data[0]
+obsid_colidx = functools.reduce(None,
+ filter(lambda x: x[1] == "Obs. ID", enumerate(csv_header)))[0]
+csv_obsid = [ x[obsid_colidx] for x in csv_data ]
+obsid_dict = { obsid:idx for idx, obsid in enumerate(csv_obsid) }
+
+obsid_data = csv_data[obsid_dict["%s" % obsid]]
+json_data = OrderedDict(zip(csv_header, obsid_data))
+with open(json_fn, "w") as jsonfile:
+ jsonfile.write(json.dumps(json_data, indent=4, ensure_ascii=False))
+
+# vim: set ts=4 sw=4 tw=0 fenc=utf-8 ft=python: #
diff --git a/make_r500_regions.py b/make_r500_regions.py
new file mode 100755
index 0000000..d7e15cf
--- /dev/null
+++ b/make_r500_regions.py
@@ -0,0 +1,90 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+#
+# Extract R500 from the '*_INFO.json' file, and center coordinate from
+# existing "sbprofile.reg", and then make the circle regions w.r.t R500
+# in order to visualize the FoV coverage of the observations.
+#
+# Aaron LI
+# 2016-04-15
+#
+# ChangeLog:
+# 2016-04-16:
+# * Fix R500 unit (convert from kpc to Chandra ACIS pixel)
+#
+
+import sys
+import glob
+import os
+import re
+import json
+import argparse
+
+
+def frange(x, y, step):
+ while x < y:
+ yield x
+ x += step
+
+
+def main():
+ parser = argparse.ArgumentParser(description="Make R500 circle regions")
+ parser.add_argument("-j", "--json", dest="json", required=False,
+ help="the *_INFO.json file (default: find ../*_INFO.json)")
+ parser.add_argument("-i", "--regin", dest="regin",
+ required=False, default="sbprofile.reg",
+ help="region from which to extract the center coordinate " + \
+ "(default: sbprofile.reg)")
+ parser.add_argument("-o", "--regout", dest="regout",
+ required=False, default="r500.reg",
+ help="output region filename (default: r500.reg)")
+ args = parser.parse_args()
+
+ # default "*_INFO.json"
+ info_json = glob.glob("../*_INFO.json")[0]
+ if args.json:
+ info_json = args.json
+
+ json_str = open(info_json).read().rstrip().rstrip(",")
+ info = json.loads(json_str)
+
+ if "R500 (kpc)" in info.keys():
+ # lwt's
+ r500_kpc = float(info["R500 (kpc)"])
+ elif "R500" in info.keys():
+ # zzh's
+ r500_kpc = float(info["R500"])
+ else:
+ raise ValueError("Cannot get R500 from INFO.json")
+
+ # Convert kpc to Chandra ACIS pixel
+ rmax_sbp_pix = float(info["Rmax_SBP (pixel)"])
+ rmax_sbp_kpc = float(info["Rmax_SBP (kpc)"])
+ r500_pix = r500_kpc / rmax_sbp_kpc * rmax_sbp_pix
+ print("R500: %.2f (kpc), %.2f (pixel)" % (r500_kpc, r500_pix))
+
+ # get center coordinate
+ regions = open(args.regin).readlines()
+ regions = list(filter(lambda x: re.match(r"^(circle|annulus|pie).*",
+ x, re.I),
+ regions))
+ xc, yc = re.split(r"[(,)]", regions[0])[1:3]
+
+ # output region
+ r500_regions = [
+ '# Region file format: DS9 version 4.1',
+ 'global color=white width=1 font="helvetica 10 normal roman"',
+ 'physical',
+ ]
+ region_fmt = "circle(%s,%s,{0:.7f}) # text={{{1:.1f} R500 = {2:.1f} pix}}" \
+ % (xc, yc)
+ r500_regions.extend([ region_fmt.format(r500_pix*ratio, ratio,
+ r500_pix*ratio)
+ for ratio in frange(0.1, 1.0, 0.1) ])
+ with open(args.regout, "w") as outfile:
+ outfile.write("\n".join(r500_regions) + "\n")
+
+
+if __name__ == "__main__":
+ main()
+
diff --git a/make_stowbkg_image.sh b/make_stowbkg_image.sh
new file mode 100755
index 0000000..fdd824d
--- /dev/null
+++ b/make_stowbkg_image.sh
@@ -0,0 +1,289 @@
+#!/bin/sh
+#
+# Make the corresponding stowed background image for the observation.
+#
+# Based on `ciao_blanksky.sh' (v5.0; 2015-06-02)
+#
+# Aaron LI
+# 2016-04-20
+#
+
+## error code {{{
+ERR_USG=1
+ERR_DIR=11
+ERR_EVT=12
+ERR_BKG=13
+ERR_REG=14
+ERR_ASOL=21
+ERR_BPIX=22
+ERR_PBK=23
+ERR_MSK=24
+ERR_BKGTY=31
+ERR_SPEC=32
+## error code }}}
+
+## usage, help {{{
+case "$1" in
+ -[hH]*|--[hH]*)
+ printf "usage:\n"
+ printf " `basename $0` evt=<evt_fits> basedir=<base_dir> imgdir=<img_dir> erange=<elow:ehigh>\n"
+ exit ${ERR_USG}
+ ;;
+esac
+## usage, help }}}
+
+## default parameters {{{
+# default `event file' which used to match `blanksky' files
+#DFT_EVT="_NOT_EXIST_"
+DFT_EVT="`\ls evt2*_clean.fits`"
+# default dir which contains `asols, asol.lis, ...' files
+# DFT_BASEDIR="_NOT_EXIST_"
+DFT_BASEDIR=".."
+# default img directory
+DFT_IMGDIR="../img"
+# default energy range for image creation
+DFT_ERANGE="700:7000"
+
+## howto find files in `basedir'
+# default `asol.lis pattern'
+DFT_ASOLIS_PAT="acis*asol?.lis"
+## default parameters }}}
+
+## functions {{{
+# process commandline arguments
+# cmdline arg format: `KEY=VALUE'
+# Parse `KEY=VALUE' command-line arguments and assign each VALUE to the
+# shell variable KEY in the current shell.
+# NOTE(review): values go through `eval', so arguments must come from a
+# trusted caller.
+getopt_keyval() {
+    while [ -n "$1" ]; do
+        key="${1%%=*}"          # text before the first '='
+        val="${1#*=}"           # text after the first '='
+        keyval="${key}=\"${val}\""
+        echo "## getopt: eval '${keyval}'"
+        eval ${keyval}
+        shift                   # process the next argument
+    done
+}
+
+# lookup the corresponding stowed background according to the blanksky
+# Look up the stowed background file that matches a given blanksky file.
+# Arguments: $1 - path to a blanksky background file
+# Outputs:   prints the matching bgstow file path on stdout
+lookup_bgstow() {
+    _dir=`dirname $1`
+    _blanksky=`basename $1`
+    _det=`echo "${_blanksky}" | sed 's/\(acis[0-7]\).*$/\1/'`
+    _year=`echo "${_blanksky}" | sed 's/.*\(D[0-9]\{4\}\).*$/\1/'`
+    _cti=`echo "${_blanksky}" | sed 's/.*\(cti\).*$/\1/'`
+    # no stowed background for 1999; fall back to the 2000 files
+    if [ "${_year}" = "D1999" ]; then
+        _year="D2000"
+    fi
+    if [ "${_cti}" = "cti" ]; then
+        _bgstow=`\ls ${_dir}/${_det}${_year}-??-??bgstow*.fits | grep 'bgstow_cti'`
+    else
+        _bgstow=`\ls ${_dir}/${_det}${_year}-??-??bgstow*.fits | grep -v 'bgstow_cti'`
+    fi
+    if [ -z "${_bgstow}" ]; then
+        # FIX: write to stderr portably (was `>/dev/stderr'); fix message typo
+        echo "ERROR: cannot find bgstow for blanksky: ${_dir}/${_blanksky}" >&2
+        # NOTE(review): callers invoke this function via command
+        # substitution, so this `exit' only leaves the subshell; callers
+        # must also check that the captured output is non-empty.
+        exit 100
+    fi
+    echo "${_bgstow}"
+    # FIX: `_cti' was missing from the unset list
+    unset _dir _blanksky _det _year _cti _bgstow
+}
+
+# reprocess background evt with matched gainfile
+# Reprocess a background event file with the given gainfile so that its
+# calibration matches the observation's evt2 file.
+# Arguments: $1 - background FITS file (updated in place)
+#            $2 - gainfile to apply via acis_process_events
+background_regain() {
+    _outfile="$1"
+    # keep the un-regained original as *_ungain.fits while reprocessing
+    _infile="${1%.fits}_ungain.fits"
+    mv ${_outfile} ${_infile}
+    punlearn acis_process_events
+    acis_process_events infile="${_infile}" \
+        outfile="${_outfile}" \
+        acaofffile=NONE stop="none" doevtgrade=no \
+        apply_cti=yes apply_tgain=no \
+        calculate_pi=yes pix_adj=NONE \
+        gainfile="$2" \
+        eventdef="{s:ccd_id,s:node_id,i:expno,s:chip,s:tdet,f:det,f:sky,s:phas,l:pha,l:pha_ro,f:energy,l:pi,s:fltgrade,s:grade,x:status}" \
+        clobber=yes
+    # the intermediate *_ungain.fits is no longer needed
+    rm -fv ${_infile}
+    unset _infile _outfile
+}
+## functions end }}}
+
+## parameters {{{
+# process cmdline args using `getopt_keyval'
+getopt_keyval "$@"
+
+# check given parameters
+# check evt file
+if [ -r "${evt}" ]; then
+ EVT=${evt}
+elif [ -r "${DFT_EVT}" ]; then
+ EVT=${DFT_EVT}
+else
+ read -p "evt2 file: " EVT
+ if ! [ -r "${EVT}" ]; then
+ printf "ERROR: cannot access given \`${EVT}' evt file\n"
+ exit ${ERR_EVT}
+ fi
+fi
+printf "## use evt file: \`${EVT}'\n"
+# check given dir
+if [ -d "${basedir}" ]; then
+ BASEDIR=${basedir}
+elif [ -d "${DFT_BASEDIR}" ]; then
+ BASEDIR=${DFT_BASEDIR}
+else
+ read -p "basedir (contains asol files): " BASEDIR
+ if [ ! -d ${BASEDIR} ]; then
+ printf "ERROR: given \`${BASEDIR}' NOT a directory\n"
+ exit ${ERR_DIR}
+ fi
+fi
+# remove the trailing '/'
+BASEDIR=`echo ${BASEDIR} | sed 's/\/*$//'`
+printf "## use basedir: \`${BASEDIR}'\n"
+# check img dir
+if [ -n "${imgdir}" ]; then
+ IMGDIR="${imgdir}"
+else
+ IMGDIR="${DFT_IMGDIR}"
+fi
+printf "## use imgdir: \`${IMGDIR}'\n"
+# check energy range
+if [ -n "${erange}" ]; then
+ ERANGE="${erange}"
+else
+ ERANGE="${DFT_ERANGE}"
+fi
+printf "## use energy range: \`${ERANGE}'\n"
+## parameters }}}
+
+## check files in `basedir' {{{
+# check asol files
+ASOLIS=`\ls -1 ${BASEDIR}/${DFT_ASOLIS_PAT} 2>/dev/null | head -n 1`
+# FIX: quote ${ASOLIS} -- the unquoted -z test misbehaves when the glob
+# matches nothing or the path contains whitespace
+if [ -z "${ASOLIS}" ]; then
+    printf "ERROR: cannot find \"${DFT_ASOLIS_PAT}\" in dir \`${BASEDIR}'\n"
+    exit ${ERR_ASOL}
+fi
+printf "## use asolis: \`${ASOLIS}'\n"
+## check files }}}
+
+## prepare parameter files (pfiles) {{{
+CIAO_TOOLS="acis_bkgrnd_lookup dmmerge dmcopy dmmakepar dmreadpar reproject_events acis_process_events"
+
+# Copy necessary pfiles for localized usage
+for tool in ${CIAO_TOOLS}; do
+ pfile=`paccess ${tool}`
+ [ -n "${pfile}" ] && punlearn ${tool} && cp -Lvf ${pfile} .
+done
+
+# Modify environment variable 'PFILES' to use local pfiles first
+export PFILES="./:${PFILES}"
+## pfiles }}}
+
+#### main start {{{
+# decompress the asol file if necessary
+ASOL_XZ=`\ls ${BASEDIR}/pcadf*asol?.fits.xz 2>/dev/null`
+if [ -n "${ASOL_XZ}" ]; then
+ printf "decompressing the asol file ...\n"
+ unxz ${ASOL_XZ}
+fi
+
+printf "look up coresponding background file ...\n"
+punlearn acis_bkgrnd_lookup
+BKG_LKP="`acis_bkgrnd_lookup ${EVT}`"
+AS_NUM=`echo ${BKG_LKP} | tr ' ' '\n' | \grep 'acis7sD' | wc -l`
+AI_NUM=`echo ${BKG_LKP} | tr ' ' '\n' | \grep 'acis[0123]iD' | wc -l`
+## determine detector type: ACIS-S / ACIS-I {{{
+if [ ${AS_NUM} -eq 1 ]; then
+ printf "## ACIS-S, chip: 7\n"
+ CCD="7"
+ BKG_ROOT="stowbkg_c7"
+ STOWBKG=`lookup_bgstow ${BKG_LKP}`
+ cp -v ${STOWBKG} ${BKG_ROOT}_orig.fits
+elif [ ${AI_NUM} -eq 4 ]; then
+ printf "## ACIS-I, chip: 0-3\n"
+ CCD="0:3"
+ BKG_ROOT="stowbkg_c0-3"
+ AI_FILES=""
+ for bkg in ${BKG_LKP}; do
+ STOWBKG=`lookup_bgstow ${bkg}`
+ cp -v ${STOWBKG} .
+ AI_FILES="${AI_FILES},`basename ${STOWBKG}`"
+ done
+ AI_FILES=${AI_FILES#,} # remove the first ','
+ printf "## ACIS-I background files to merge:\n"
+ printf "## \`${AI_FILES}'\n"
+ printf "\`dmmerge' to merge the above blanksky files ...\n"
+ # merge 4 chips blanksky evt files
+ punlearn dmmerge
+ dmmerge "${AI_FILES}" ${BKG_ROOT}_orig.fits clobber=yes
+ rm -fv `echo ${AI_FILES} | tr ',' ' '` # remove original files
+else
+ printf "## ERROR: UNKNOW blanksky files:\n"
+ printf "## ${BKG_ORIG}\n"
+ exit ${ERR_BKG}
+fi
+## determine ACIS type }}}
+
+## check 'DATMODE' {{{
+## filter blanksky files (status=0) for `VFAINT' observations
+DATA_MODE="`dmkeypar ${EVT} DATAMODE echo=yes`"
+printf "## DATAMODE: ${DATA_MODE}\n"
+if [ "${DATA_MODE}" = "VFAINT" ]; then
+ mv -fv ${BKG_ROOT}_orig.fits ${BKG_ROOT}_tmp.fits
+ printf "apply \`status=0' to filter blanksky file ...\n"
+ punlearn dmcopy
+ dmcopy "${BKG_ROOT}_tmp.fits[status=0]" ${BKG_ROOT}_orig.fits clobber=yes
+ rm -fv ${BKG_ROOT}_tmp.fits
+fi
+## DATAMODE, status=0 }}}
+
+## check `GAINFILE' of blanksky and evt2 file {{{
+## if NOT match, then reprocess blanksky
+GAINFILE_EVT="`dmkeypar ${EVT} GAINFILE echo=yes`"
+GAINFILE_BG="`dmkeypar "${BKG_ROOT}_orig.fits" GAINFILE echo=yes`"
+if ! [ "${GAINFILE_EVT}" = "${GAINFILE_BG}" ]; then
+ printf "WARNING: GAINFILE NOT match.\n"
+ printf "event: ${GAINFILE_EVT}\n"
+ printf "blank: ${GAINFILE_BG}\n"
+ printf "reprocess blanksky with evt gainfile ...\n"
+ # reprocess blanksky using matched evt GAINFILE
+ GAINFILE="$CALDB/data/chandra/acis/det_gain/`basename ${GAINFILE_EVT}`"
+ printf "GAINFILE: ${GAINFILE}\n"
+ background_regain "${BKG_ROOT}_orig.fits" ${GAINFILE}
+fi
+## check & match GAINFILE }}}
+
+printf "add the PNT header keywords ... "
+EVT_HEADER="_evt_header.par"
+EVT_PNT="_evt_pnt.par"
+punlearn dmmakepar
+dmmakepar ${EVT} ${EVT_HEADER} clobber=yes
+\grep -i '_pnt' ${EVT_HEADER} > ${EVT_PNT}
+punlearn dmreadpar
+dmreadpar ${EVT_PNT} "${BKG_ROOT}_orig.fits[EVENTS]" clobber=yes
+printf "DONE\n"
+
+printf "reproject the background ...\n"
+punlearn reproject_events
+reproject_events infile=${BKG_ROOT}_orig.fits \
+ outfile=${BKG_ROOT}.fits match=${EVT} \
+ aspect="@${ASOLIS}" random=0 clobber=yes
+
+# Make image of the stowed background
+if [ -f "${IMGDIR}/skyfov.fits" ]; then
+ ln -svf ${IMGDIR}/skyfov.fits .
+ BKG_IMG="img_${BKG_ROOT}_e`echo ${ERANGE} | tr ':' '-'`.fits"
+ printf "creating the background image ...\n"
+ punlearn dmcopy
+ dmcopy infile="${BKG_ROOT}.fits[energy=${ERANGE}][sky=region(skyfov.fits[ccd_id=${CCD}])][bin sky=1]" outfile=${BKG_IMG} clobber=yes
+else
+ printf "ERROR: '${IMGDIR}/skyfov.fits' not exists\n"
+fi
+## main end }}}
+
+# clean
+printf "\nclean ...\n"
+rm -fv ${BKG_ROOT}_orig.fits ${EVT_PNT}
+
+printf "\nFINISHED\n"
+
+# vim: set ts=8 sw=4 tw=0 fenc=utf-8 ft=sh #
diff --git a/prepare_sbpfit.py b/prepare_sbpfit.py
new file mode 100755
index 0000000..4ec7cf4
--- /dev/null
+++ b/prepare_sbpfit.py
@@ -0,0 +1,98 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+#
+# Prepare the configuration file for the `sbp_fit.py`.
+# And extract name, obsid, r500 information from the '*_INFO.json' file
+# to fill the config.
+#
+# Aaron LI
+# 2016-04-21
+#
+# ChangeLog:
+#
+
+import sys
+import glob
+import os
+import re
+import json
+import argparse
+from datetime import datetime
+
+
def update_sbpfit_conf(sbpfit_conf, info):
    """
    Update the sbpfit configuration according to the INFO.

    Arguments:
    * sbpfit_conf: list of lines of the sample sbpfit config
    * info: INFO dictionary

    Return:
      updated `sbpfit_conf`

    Raises:
      ValueError: if R500 cannot be determined from the INFO dictionary
    """
    name = info["Source Name"]
    obsid = int(info["Obs. ID"])

    if "R500 (kpc)" in info.keys():
        # lwt's INFO format
        r500_kpc = float(info["R500 (kpc)"])
    elif "R500" in info.keys():
        # zzh's INFO format
        r500_kpc = float(info["R500"])
    else:
        raise ValueError("Cannot get R500 from INFO.json")
    # Convert kpc to Chandra ACIS pixel; the Rmax_SBP values are given
    # in both units and therefore fix the pixel scale.
    rmax_sbp_pix = float(info["Rmax_SBP (pixel)"])
    rmax_sbp_kpc = float(info["Rmax_SBP (kpc)"])
    r500_pix = r500_kpc / rmax_sbp_kpc * rmax_sbp_pix
    print("R500: %.2f (kpc), %.2f (pixel)" % (r500_kpc, r500_pix))

    # NOTE: use str.replace() instead of re.sub(): the replacement values
    # (e.g., the source name) may contain backslashes or "\g<...>"-like
    # sequences, which re.sub() would interpret as escapes and corrupt
    # (or raise on) the output.
    replacements = {
        "<DATE>": datetime.utcnow().isoformat(),
        "<NAME>": name,
        "<OBSID>": "%s" % obsid,
        "<R500_PIX>": "%.2f" % r500_pix,
        "<R500_KPC>": "%.2f" % r500_kpc,
    }
    sbpfit_conf_new = []
    for line in sbpfit_conf:
        for placeholder, value in replacements.items():
            line = line.replace(placeholder, value)
        sbpfit_conf_new.append(line)

    return sbpfit_conf_new
+
+
def main():
    """
    Fill a sample sbpfit config template with the values taken from an
    "*_INFO.json" file, and write the result to the output file.
    """
    parser = argparse.ArgumentParser(description="Prepare sbpfit config")
    parser.add_argument("-j", "--json", dest="json", required=False,
            help="the *_INFO.json file (default: find ../*_INFO.json)")
    parser.add_argument("-c", "--config", dest="config", required=True,
            help="sample sbpfit configuration")
    parser.add_argument("outfile", nargs="?",
            help="filename of the output sbpfit config " + \
                 "(default: same as the sample config)")
    args = parser.parse_args()

    # Determine the INFO json file.
    # NOTE: only glob for the default "../*_INFO.json" when no file was
    # given on the command line; the original unconditional glob raised
    # IndexError even when "--json" was supplied.
    if args.json:
        info_json = args.json
    else:
        try:
            info_json = glob.glob("../*_INFO.json")[0]
        except IndexError:
            raise FileNotFoundError("no '../*_INFO.json' found; "
                                    "specify one with --json")

    # strip a possible trailing comma, which would break json.loads()
    json_str = open(info_json).read().rstrip().rstrip(",")
    info = json.loads(json_str)

    # sample config file
    sbpfit_conf = open(args.config).readlines()

    # output config file (default: basename of the sample config)
    if args.outfile:
        outfile = args.outfile
    else:
        outfile = os.path.basename(args.config)

    sbpfit_conf_new = update_sbpfit_conf(sbpfit_conf, info)
    with open(outfile, "w") as outf:
        outf.write("".join(sbpfit_conf_new))
+
diff --git a/prepare_sbpfit_dir.sh b/prepare_sbpfit_dir.sh
new file mode 100755
index 0000000..100f0e9
--- /dev/null
+++ b/prepare_sbpfit_dir.sh
@@ -0,0 +1,67 @@
+#!/bin/sh
+#
+# Create the new `sbpfit' subdirectory, and prepare the files for fitting
+# the surface brightness profile.
+#
+# Aaron LI
+# Created: 2016-03-28
+#
+
prepare() {
    # Link the SBP data products from the given image directory into the
    # current (sbpfit) directory, and instantiate the sbpfit config
    # templates with date/name/obsid taken from the INFO json (if given).
    #
    # Arguments:
    #   $1 - image directory holding sbprofile.* and evt2_c*_clean.fits
    #   $2 - (optional) path to the *_INFO.json file
    #
    # Globals (read): SBPFIT_SBETA_CONF, SBPFIT_DBETA_CONF
    img_dir="$1"
    info="$2"
    # NOTE: quote the directory part but leave the glob unquoted so it
    # still expands; unquoted variables broke on paths with spaces
    ln -sv "${img_dir}"/sbprofile.* "${img_dir}/sbprofile_rmid.fits" .
    ln -sv "${img_dir}"/evt2_c*_clean.fits .
    # sbpfit config templates
    cp "${SBPFIT_SBETA_CONF}" "${SBPFIT_DBETA_CONF}" .
    sbeta_conf=$(basename "${SBPFIT_SBETA_CONF}")
    dbeta_conf=$(basename "${SBPFIT_DBETA_CONF}")
    date=$(date --iso-8601=seconds)
    sed -i'' -e "s#<DATE>#${date}#" "${sbeta_conf}"
    sed -i'' -e "s#<DATE>#${date}#" "${dbeta_conf}"
    if [ -n "${info}" ]; then
        # pull the source name and obsid out of the INFO json
        name=$(grep 'Source Name' "${info}" | awk -F'"' '{ print $4 }')
        obsid=$(grep 'Obs. ID' "${info}" | awk -F':' '{ print $2 }' | tr -d ' ,')
        echo "Name: ${name}; ObsID: ${obsid}"
        # sbeta
        sed -i'' -e "s#<NAME>#${name}#" "${sbeta_conf}"
        sed -i'' -e "s#<OBSID>#${obsid}#" "${sbeta_conf}"
        # dbeta
        sed -i'' -e "s#<NAME>#${name}#" "${dbeta_conf}"
        sed -i'' -e "s#<OBSID>#${obsid}#" "${dbeta_conf}"
    fi
}
+
+
# Sanity check: need exactly the config directory and the repro list.
if [ $# -ne 2 ]; then
    echo "Usage:"
    echo "    `basename $0` <config_dir> <repro_list>"
    exit 1
fi

CUR_DIR=`pwd -P`
CONFIG_DIR=`realpath $1`
# sbpfit config templates (single-beta and double-beta models)
SBPFIT_SBETA_CONF="${CONFIG_DIR}/sbpfit_sbeta.conf"
SBPFIT_DBETA_CONF="${CONFIG_DIR}/sbpfit_dbeta.conf"

# Process every repro directory listed in the repro list file.
cat $2 | while read repro; do
    echo "*** ${repro} ***"
    # return to the starting directory first, since `repro' may be relative
    cd ${CUR_DIR}
    cd ${repro}
    REPRO_DIR=`pwd -P`
    SBPFIT_DIR="${REPRO_DIR}/sbpfit"
    # back up a previous sbpfit directory instead of overwriting it
    [ -d "${SBPFIT_DIR}" ] && mv -fv ${SBPFIT_DIR} ${SBPFIT_DIR}_bak
    mkdir ${SBPFIT_DIR} && cd ${SBPFIT_DIR}
    if [ -d "../img/ne" ] && [ -d "../img/sw" ]; then
        # observation split into 'ne' and 'sw' halves: prepare each
        # half in its own subdirectory with its own INFO json
        echo "NOTE: exists 'ne' and 'sw' two parts"
        mkdir ne && cd ne
        INFO=`realpath ${REPRO_DIR}/*ne_INFO.json 2>/dev/null`
        prepare ../../img/ne ${INFO}
        cd ${SBPFIT_DIR}
        mkdir sw && cd sw
        INFO=`realpath ${REPRO_DIR}/*sw_INFO.json 2>/dev/null`
        prepare ../../img/sw ${INFO}
    else
        # single-part observation
        INFO=`realpath ${REPRO_DIR}/*_INFO.json 2>/dev/null`
        prepare ../img ${INFO}
    fi
done
+
diff --git a/sbp_fit.py b/sbp_fit.py
new file mode 100755
index 0000000..10637f3
--- /dev/null
+++ b/sbp_fit.py
@@ -0,0 +1,803 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+#
+# Aaron LI
+# Created: 2016-03-13
+# Updated: 2016-04-21
+#
+# Changelogs:
+# 2016-04-21:
+# * Plot another X axis with unit "r500", with R500 values marked
+# * Adjust output image size/resolution
+# 2016-04-20:
+# * Support "pix" and "kpc" units
+# * Allow ignore data w.r.t R500 value
+# * Major changes to the config syntax
+# * Add commandline argument to select the sbp model
+# 2016-04-05:
+# * Allow fix parameters
+# 2016-03-31:
+# * Remove `ci_report()'
+# * Add `make_results()' to organize all results as a Python dictionary
+# * Report results as json string
+# 2016-03-28:
+# * Add `main()', `make_model()'
+# * Use `configobj' to handle configurations
+# * Save fit results and plot
+# * Add `ci_report()'
+# 2016-03-14:
+# * Refactor classes `FitModelSBeta' and `FitModelDBeta'
+# * Add matplotlib plot support
+# * Add `ignore_data()' and `notice_data()' support
+# * Add classes `FitModelSBetaNorm' and `FitModelDBetaNorm'
+#
+# TODO:
+# * to allow fit the outer beta component, then fix it, and fit the inner one
+#
+
+"""
+Fit the surface brightness profile (SBP) with the single-beta model:
+ s(r) = s0 * [1.0 + (r/rc)^2] ^ (0.5-3*beta) + bkg
+or the double-beta model:
+ s(r) = s01 * [1.0 + (r/rc1)^2] ^ (0.5-3*beta1) +
+ s02 * [1.0 + (r/rc2)^2] ^ (0.5-3*beta2) + bkg
+
+
+Sample config file:
+-------------------------------------------------
+name = <NAME>
+obsid = <OBSID>
+r500_pix = <R500_PIX>
+r500_kpc = <R500_KPC>
+
+sbpfile = sbprofile.txt
+# unit of radius: pix (default) or kpc
+unit = pixel
+
+# sbp model: "sbeta" or "dbeta"
+model = sbeta
+#model = dbeta
+
+# output file to store the fitting results
+outfile = sbpfit.txt
+# output file to save the fitting plot
+imgfile = sbpfit.png
+
+# data range to be ignored during fitting (same unit as the above "unit")
+#ignore = 0.0-20.0,
+# specify the ignore range w.r.t R500 ("r500_pix" or "r500_kpc" required)
+#ignore_r500 = 0.0-0.15,
+
+[sbeta]
+# model-related options (OVERRIDE the upper level options)
+outfile = sbpfit_sbeta.txt
+imgfile = sbpfit_sbeta.png
+#ignore = 0.0-20.0,
+#ignore_r500 = 0.0-0.15,
+ [[params]]
+ # model parameters
+ # name = initial, lower, upper, variable (FIXED/False to fix the parameter)
+ s0 = 1.0e-8, 0.0, 1.0e-6
+ rc = 30.0, 5.0, 1.0e4
+ #rc = 30.0, 5.0, 1.0e4, FIXED
+ beta = 0.7, 0.3, 1.1
+ bkg = 1.0e-10, 0.0, 1.0e-8
+
+
+[dbeta]
+outfile = sbpfit_dbeta.txt
+imgfile = sbpfit_dbeta.png
+#ignore = 0.0-20.0,
+#ignore_r500 = 0.0-0.15,
+ [[params]]
+ s01 = 1.0e-8, 0.0, 1.0e-6
+ rc1 = 50.0, 10.0, 1.0e4
+ beta1 = 0.7, 0.3, 1.1
+ s02 = 1.0e-8, 0.0, 1.0e-6
+ rc2 = 30.0, 2.0, 5.0e2
+ beta2 = 0.7, 0.3, 1.1
+ bkg = 1.0e-10, 0.0, 1.0e-8
+-------------------------------------------------
+"""
+
+__version__ = "0.6.1"
+__date__ = "2016-04-21"
+
+
+import numpy as np
+import lmfit
+import matplotlib.pyplot as plt
+
+from configobj import ConfigObj
+from matplotlib.backends.backend_agg import FigureCanvasAgg as FigureCanvas
+from matplotlib.figure import Figure
+
+import os
+import sys
+import re
+import argparse
+import json
+from collections import OrderedDict
+
+
+plt.style.use("ggplot")
+
+
class FitModel:
    """
    Meta-class of the fitting model.

    The supplied `func' should have the following syntax:
        y = f(x, params)
    where the `params' is the parameters to be fitted,
    and should be provided as well.
    """
    def __init__(self, name=None, func=None, params=None):
        # NOTE: do NOT use `lmfit.Parameters()` as the default argument
        # value: defaults are evaluated only once at function definition,
        # so every instance would share (and mutate) the very same
        # Parameters object.
        self.name = name
        self.func = func
        self.params = params if params is not None else lmfit.Parameters()

    def f(self, x):
        """
        Evaluate the model at `x' with the current parameters.
        """
        return self.func(x, self.params)

    def get_param(self, name=None):
        """
        Return the requested `Parameter' object, or the whole
        `Parameters' object if no name supplied.
        """
        try:
            return self.params[name]
        except KeyError:
            return self.params

    def set_param(self, name, *args, **kwargs):
        """
        Set the properties of the specified parameter.
        """
        param = self.params[name]
        param.set(*args, **kwargs)

    def plot(self, params, xdata, ax):
        """
        Plot the fitted model.
        """
        ydata = self.func(xdata, params)
        ax.plot(xdata, ydata, 'k-')
+
class FitModelSBeta(FitModel):
    """
    The single-beta model to be fitted.
    Single-beta model, with a constant background:
        s(r) = s0 * [1 + (r/rc)^2]^(0.5 - 3*beta) + bkg
    """
    params = lmfit.Parameters()
    params.add_many(  # (name, value, vary, min, max, expr)
            ("s0", 1.0e-8, True, 0.0, 1.0e-6, None),
            ("rc", 30.0, True, 1.0, 1.0e4, None),
            ("beta", 0.7, True, 0.3, 1.1, None),
            ("bkg", 1.0e-9, True, 0.0, 1.0e-7, None))

    @staticmethod
    def sbeta(r, params):
        parvals = params.valuesdict()
        s0 = parvals["s0"]
        rc = parvals["rc"]
        beta = parvals["beta"]
        bkg = parvals["bkg"]
        return s0 * np.power((1 + (r/rc)**2), (0.5 - 3*beta)) + bkg

    def __init__(self):
        # NOTE: use the zero-argument `super()` instead of
        # `super(self.__class__, self)`, which causes infinite recursion
        # as soon as this class is subclassed.
        super().__init__(name="Single-beta",
                func=self.sbeta, params=self.params)

    def plot(self, params, xdata, ax):
        """
        Plot the fitted model, as well as the fitted parameters.
        """
        super().plot(params, xdata, ax)
        ydata = self.sbeta(xdata, params)
        # mark the fitted core radius and background level
        ax.vlines(x=params["rc"].value, ymin=min(ydata), ymax=max(ydata),
                linestyles="dashed")
        ax.hlines(y=params["bkg"].value, xmin=min(xdata), xmax=max(xdata),
                linestyles="dashed")
        ax.text(x=params["rc"].value, y=min(ydata),
                s="beta: %.2f\nrc: %.2f" % (params["beta"].value,
                    params["rc"].value))
        ax.text(x=min(xdata), y=min(ydata),
                s="bkg: %.3e" % params["bkg"].value,
                verticalalignment="top")
+
+
class FitModelDBeta(FitModel):
    """
    The double-beta model to be fitted.
    Double-beta model, with a constant background.

    NOTE:
    the first beta component (s01, rc1, beta1) describes the main and
    outer SBP; while the second beta component (s02, rc2, beta2) accounts
    for the central brightness excess.
    """
    params = lmfit.Parameters()
    params.add("s01", value=1.0e-8, min=0.0, max=1.0e-6)
    params.add("rc1", value=50.0, min=10.0, max=1.0e4)
    params.add("beta1", value=0.7, min=0.3, max=1.1)
    #params.add("df_s0", value=1.0e-8, min=0.0, max=1.0e-6)
    #params.add("s02", expr="s01 + df_s0")
    params.add("s02", value=1.0e-8, min=0.0, max=1.0e-6)
    #params.add("df_rc", value=30.0, min=0.0, max=1.0e4)
    #params.add("rc2", expr="rc1 - df_rc")
    params.add("rc2", value=20.0, min=1.0, max=5.0e2)
    params.add("beta2", value=0.7, min=0.3, max=1.1)
    params.add("bkg", value=1.0e-9, min=0.0, max=1.0e-7)

    @staticmethod
    def beta1(r, params):
        """
        This beta component describes the main/outer part of the SBP.
        """
        parvals = params.valuesdict()
        s01 = parvals["s01"]
        rc1 = parvals["rc1"]
        beta1 = parvals["beta1"]
        bkg = parvals["bkg"]
        return s01 * np.power((1 + (r/rc1)**2), (0.5 - 3*beta1)) + bkg

    @staticmethod
    def beta2(r, params):
        """
        This beta component describes the central/excess part of the SBP.
        """
        parvals = params.valuesdict()
        s02 = parvals["s02"]
        rc2 = parvals["rc2"]
        beta2 = parvals["beta2"]
        return s02 * np.power((1 + (r/rc2)**2), (0.5 - 3*beta2))

    @classmethod
    def dbeta(cls, r, params):
        # total model: sum of the two beta components
        # (first argument renamed `cls`: this is a classmethod, not an
        # instance method)
        return cls.beta1(r, params) + cls.beta2(r, params)

    def __init__(self):
        # zero-argument super() avoids the infinite-recursion pitfall of
        # `super(self.__class__, self)` under subclassing
        super().__init__(name="Double-beta",
                func=self.dbeta, params=self.params)

    def plot(self, params, xdata, ax):
        """
        Plot the fitted model, and each beta component,
        as well as the fitted parameters.
        """
        super().plot(params, xdata, ax)
        beta1_ydata = self.beta1(xdata, params)
        beta2_ydata = self.beta2(xdata, params)
        ax.plot(xdata, beta1_ydata, 'b-.')
        ax.plot(xdata, beta2_ydata, 'b-.')
        # fitted parameters
        ydata = beta1_ydata + beta2_ydata
        ax.vlines(x=params["rc1"].value, ymin=min(ydata), ymax=max(ydata),
                linestyles="dashed")
        ax.vlines(x=params["rc2"].value, ymin=min(ydata), ymax=max(ydata),
                linestyles="dashed")
        ax.hlines(y=params["bkg"].value, xmin=min(xdata), xmax=max(xdata),
                linestyles="dashed")
        ax.text(x=params["rc1"].value, y=min(ydata),
                s="beta1: %.2f\nrc1: %.2f" % (params["beta1"].value,
                    params["rc1"].value))
        ax.text(x=params["rc2"].value, y=min(ydata),
                s="beta2: %.2f\nrc2: %.2f" % (params["beta2"].value,
                    params["rc2"].value))
        ax.text(x=min(xdata), y=min(ydata),
                s="bkg: %.3e" % params["bkg"].value,
                verticalalignment="top")
+
+
class FitModelSBetaNorm(FitModel):
    """
    The single-beta model to be fitted.
    Single-beta model, with a constant background.
    Normalized the `s0' and `bkg' parameters by take the logarithm.
    """
    params = lmfit.Parameters()
    params.add_many(  # (name, value, vary, min, max, expr)
            ("log10_s0", -8.0, True, -12.0, -6.0, None),
            ("rc", 30.0, True, 1.0, 1.0e4, None),
            ("beta", 0.7, True, 0.3, 1.1, None),
            ("log10_bkg", -9.0, True, -12.0, -7.0, None))

    @staticmethod
    def sbeta(r, params):
        parvals = params.valuesdict()
        s0 = 10 ** parvals["log10_s0"]
        rc = parvals["rc"]
        beta = parvals["beta"]
        bkg = 10 ** parvals["log10_bkg"]
        return s0 * np.power((1 + (r/rc)**2), (0.5 - 3*beta)) + bkg

    def __init__(self):
        # zero-argument super() avoids the infinite-recursion pitfall of
        # `super(self.__class__, self)` under subclassing
        super().__init__(name="Single-beta",
                func=self.sbeta, params=self.params)

    def plot(self, params, xdata, ax):
        """
        Plot the fitted model, as well as the fitted parameters.
        """
        super().plot(params, xdata, ax)
        ydata = self.sbeta(xdata, params)
        ax.vlines(x=params["rc"].value, ymin=min(ydata), ymax=max(ydata),
                linestyles="dashed")
        # NOTE: the background parameter is named "log10_bkg" here (not
        # "bkg" as in FitModelSBeta); the original code raised KeyError
        ax.hlines(y=(10 ** params["log10_bkg"].value), xmin=min(xdata),
                xmax=max(xdata), linestyles="dashed")
        ax.text(x=params["rc"].value, y=min(ydata),
                s="beta: %.2f\nrc: %.2f" % (params["beta"].value,
                    params["rc"].value))
        ax.text(x=min(xdata), y=min(ydata),
                s="bkg: %.3e" % (10 ** params["log10_bkg"].value),
                verticalalignment="top")
+
+
class FitModelDBetaNorm(FitModel):
    """
    The double-beta model to be fitted.
    Double-beta model, with a constant background.
    Normalized the `s01', `s02' and `bkg' parameters by take the logarithm.

    NOTE:
    the first beta component (s01, rc1, beta1) describes the main and
    outer SBP; while the second beta component (s02, rc2, beta2) accounts
    for the central brightness excess.
    """
    params = lmfit.Parameters()
    params.add("log10_s01", value=-8.0, min=-12.0, max=-6.0)
    params.add("rc1", value=50.0, min=10.0, max=1.0e4)
    params.add("beta1", value=0.7, min=0.3, max=1.1)
    #params.add("df_s0", value=1.0e-8, min=0.0, max=1.0e-6)
    #params.add("s02", expr="s01 + df_s0")
    params.add("log10_s02", value=-8.0, min=-12.0, max=-6.0)
    #params.add("df_rc", value=30.0, min=0.0, max=1.0e4)
    #params.add("rc2", expr="rc1 - df_rc")
    params.add("rc2", value=20.0, min=1.0, max=5.0e2)
    params.add("beta2", value=0.7, min=0.3, max=1.1)
    params.add("log10_bkg", value=-9.0, min=-12.0, max=-7.0)

    @staticmethod
    def beta1(r, params):
        """
        This beta component describes the main/outer part of the SBP.
        """
        parvals = params.valuesdict()
        s01 = 10 ** parvals["log10_s01"]
        rc1 = parvals["rc1"]
        beta1 = parvals["beta1"]
        bkg = 10 ** parvals["log10_bkg"]
        return s01 * np.power((1 + (r/rc1)**2), (0.5 - 3*beta1)) + bkg

    @staticmethod
    def beta2(r, params):
        """
        This beta component describes the central/excess part of the SBP.
        """
        parvals = params.valuesdict()
        s02 = 10 ** parvals["log10_s02"]
        rc2 = parvals["rc2"]
        beta2 = parvals["beta2"]
        return s02 * np.power((1 + (r/rc2)**2), (0.5 - 3*beta2))

    @classmethod
    def dbeta(cls, r, params):
        # total model: sum of the two beta components
        return cls.beta1(r, params) + cls.beta2(r, params)

    def __init__(self):
        # zero-argument super() avoids the infinite-recursion pitfall of
        # `super(self.__class__, self)` under subclassing
        super().__init__(name="Double-beta",
                func=self.dbeta, params=self.params)

    def plot(self, params, xdata, ax):
        """
        Plot the fitted model, and each beta component,
        as well as the fitted parameters.
        """
        super().plot(params, xdata, ax)
        beta1_ydata = self.beta1(xdata, params)
        beta2_ydata = self.beta2(xdata, params)
        ax.plot(xdata, beta1_ydata, 'b-.')
        ax.plot(xdata, beta2_ydata, 'b-.')
        # fitted parameters
        ydata = beta1_ydata + beta2_ydata
        # NOTE: the core radius parameter is "rc1" (the original code
        # wrongly used "log10_rc1", which does not exist -> KeyError)
        ax.vlines(x=params["rc1"].value, ymin=min(ydata), ymax=max(ydata),
                linestyles="dashed")
        ax.vlines(x=params["rc2"].value, ymin=min(ydata), ymax=max(ydata),
                linestyles="dashed")
        # NOTE: the background parameter is "log10_bkg" (not "bkg" as in
        # FitModelDBeta); the original code raised KeyError here
        ax.hlines(y=(10 ** params["log10_bkg"].value), xmin=min(xdata),
                xmax=max(xdata), linestyles="dashed")
        ax.text(x=params["rc1"].value, y=min(ydata),
                s="beta1: %.2f\nrc1: %.2f" % (params["beta1"].value,
                    params["rc1"].value))
        ax.text(x=params["rc2"].value, y=min(ydata),
                s="beta2: %.2f\nrc2: %.2f" % (params["beta2"].value,
                    params["rc2"].value))
        ax.text(x=min(xdata), y=min(ydata),
                s="bkg: %.3e" % (10 ** params["log10_bkg"].value),
                verticalalignment="top")
+
+
class SbpFit:
    """
    Class to handle the SBP fitting with single-/double-beta model.
    """
    def __init__(self, model, method="lbfgsb",
            xdata=None, ydata=None, xerr=None, yerr=None, xunit="pix",
            name=None, obsid=None, r500_pix=None, r500_kpc=None):
        self.method = method
        self.model = model
        self.load_data(xdata=xdata, ydata=ydata, xerr=xerr, yerr=yerr,
                xunit=xunit)
        self.set_source(name=name, obsid=obsid, r500_pix=r500_pix,
                r500_kpc=r500_kpc)

    def set_source(self, name, obsid=None, r500_pix=None, r500_kpc=None):
        """
        Set the source information; invalid/missing values become None.
        """
        self.name = name
        try:
            self.obsid = int(obsid)
        except TypeError:
            self.obsid = None
        try:
            self.r500_pix = float(r500_pix)
        except TypeError:
            self.r500_pix = None
        try:
            self.r500_kpc = float(r500_kpc)
        except TypeError:
            self.r500_kpc = None
        try:
            self.kpc_per_pix = self.r500_kpc / self.r500_pix
        except (TypeError, ZeroDivisionError):
            # pixel scale unknown: flag with -1
            self.kpc_per_pix = -1

    def load_data(self, xdata, ydata, xerr, yerr, xunit="pix"):
        """
        Load the SBP data (radius, brightness, and their errors) and
        initialize the notice/ignore mask (all points noticed).
        """
        self.xdata = xdata
        self.ydata = ydata
        self.xerr = xerr
        self.yerr = yerr
        if xdata is not None:
            # NOTE: use the builtin `bool`: `np.bool` is a deprecated
            # alias that was removed in NumPy >= 1.24
            self.mask = np.ones(xdata.shape, dtype=bool)
        else:
            self.mask = None
        if xunit.lower() in ["pix", "pixel"]:
            self.xunit = "pix"
        elif xunit.lower() == "kpc":
            self.xunit = "kpc"
        else:
            raise ValueError("invalid xunit: %s" % xunit)

    def ignore_data(self, xmin=None, xmax=None, unit=None):
        """
        Ignore the data points within range [xmin, xmax].
        If xmin is None, then xmin=min(xdata);
        if xmax is None, then xmax=max(xdata).

        if unit is None, then assume the same unit as `self.xunit'.
        """
        if unit is None:
            unit = self.xunit
        if xmin is not None:
            xmin = self.convert_unit(xmin, unit=unit)
        else:
            xmin = np.min(self.xdata)
        if xmax is not None:
            xmax = self.convert_unit(xmax, unit=unit)
        else:
            xmax = np.max(self.xdata)
        ignore_idx = np.logical_and(self.xdata >= xmin, self.xdata <= xmax)
        self.mask[ignore_idx] = False
        # reset `f_residual' (it captures the mask at build time)
        self.f_residual = None

    def notice_data(self, xmin=None, xmax=None, unit=None):
        """
        Notice the data points within range [xmin, xmax].
        If xmin is None, then xmin=min(xdata);
        if xmax is None, then xmax=max(xdata).

        if unit is None, then assume the same unit as `self.xunit'.
        """
        if unit is None:
            unit = self.xunit
        if xmin is not None:
            xmin = self.convert_unit(xmin, unit=unit)
        else:
            xmin = np.min(self.xdata)
        if xmax is not None:
            xmax = self.convert_unit(xmax, unit=unit)
        else:
            xmax = np.max(self.xdata)
        notice_idx = np.logical_and(self.xdata >= xmin, self.xdata <= xmax)
        self.mask[notice_idx] = True
        # reset `f_residual'
        self.f_residual = None

    def convert_unit(self, x, unit):
        """
        Convert the value x in given unit to be the unit `self.xunit'
        """
        if unit == self.xunit:
            return x
        elif (unit == "pix") and (self.xunit == "kpc"):
            return (x / self.r500_pix * self.r500_kpc)
        elif (unit == "kpc") and (self.xunit == "pix"):
            return (x / self.r500_kpc * self.r500_pix)
        elif (unit == "r500") and (self.xunit == "pix"):
            return (x * self.r500_pix)
        elif (unit == "r500") and (self.xunit == "kpc"):
            return (x * self.r500_kpc)
        else:
            raise ValueError("invalid units: %s vs. %s" % (unit, self.xunit))

    def convert_to_r500(self, x, unit=None):
        """
        Convert the value x in given unit to be in unit "r500".
        """
        if unit is None:
            unit = self.xunit
        if unit == "r500":
            return x
        elif unit == "pix":
            return (x / self.r500_pix)
        elif unit == "kpc":
            return (x / self.r500_kpc)
        else:
            raise ValueError("invalid unit: %s" % unit)

    def set_residual(self):
        """
        Build the residual function used by the minimizer, honoring the
        current notice/ignore mask.
        """
        def f_residual(params):
            if self.yerr is None:
                # NOTE: ydata must be masked as well; the original
                # unmasked `self.ydata` caused a shape mismatch as soon
                # as any data point was ignored
                return self.model.func(self.xdata[self.mask], params) - \
                        self.ydata[self.mask]
            else:
                # weighted residuals
                return (self.model.func(self.xdata[self.mask], params) - \
                        self.ydata[self.mask]) / self.yerr[self.mask]
        self.f_residual = f_residual

    def fit(self, method=None):
        """
        Fit the model to the (noticed) data with the given method.
        """
        if method is None:
            method = self.method
        if not hasattr(self, "f_residual") or self.f_residual is None:
            self.set_residual()
        self.fitter = lmfit.Minimizer(self.f_residual, self.model.params)
        self.fitted = self.fitter.minimize(method=method)
        self.fitted_model = lambda x: self.model.func(x, self.fitted.params)

    def calc_ci(self, sigmas=[0.68, 0.90]):
        # `conf_interval' requires the fitted results have valid `stderr',
        # so we need to re-fit the model with method `leastsq'.
        fitted = self.fitter.minimize(method="leastsq",
                params=self.fitted.params)
        self.ci, self.trace = lmfit.conf_interval(self.fitter, fitted,
                sigmas=sigmas, trace=True)

    def make_results(self):
        """
        Make the `self.results' dictionary which contains all the fitting
        results as well as the confidence intervals.
        """
        fitted = self.fitted
        self.results = OrderedDict()
        ## fitting results
        self.results.update(
                nfev   = fitted.nfev,
                ndata  = fitted.ndata,
                nvarys = fitted.nvarys,  # number of variable parameters
                nfree  = fitted.nfree,   # degrees of freedom
                chisqr = fitted.chisqr,
                redchi = fitted.redchi,
                aic    = fitted.aic,
                bic    = fitted.bic)
        params = fitted.params
        pnames = list(params.keys())
        pvalues = OrderedDict()
        for pn in pnames:
            par = params.get(pn)
            pvalues[pn] = [par.value, par.min, par.max, par.vary]
        self.results["params"] = pvalues
        ## confidence intervals
        if hasattr(self, "ci") and self.ci is not None:
            ci = self.ci
            ci_values = OrderedDict()
            ci_sigmas = [ "ci%02d" % (v[0]*100) for v in ci.get(pnames[0]) ]
            ci_names = sorted(list(set(ci_sigmas)))
            ci_idx = { k: [] for k in ci_names }
            for cn, idx in zip(ci_sigmas, range(len(ci_sigmas))):
                ci_idx[cn].append(idx)
            # parameters ci
            for pn in pnames:
                ci_pv = OrderedDict()
                pv = [ v[1] for v in ci.get(pn) ]
                # best value ("ci00" entry)
                pv_best = pv[ ci_idx["ci00"][0] ]
                ci_pv["best"] = pv_best
                # ci of each sigma, as offsets from the best value
                pv2 = [ v-pv_best for v in pv ]
                for cn in ci_names[1:]:
                    ci_pv[cn] = [ pv2[idx] for idx in ci_idx[cn] ]
                ci_values[pn] = ci_pv
            self.results["ci"] = ci_values

    def report(self, outfile=sys.stdout):
        """
        Report the fitting results as a JSON string to `outfile'.
        """
        if not hasattr(self, "results") or self.results is None:
            self.make_results()
        jd = json.dumps(self.results, indent=2)
        print(jd, file=outfile)

    def plot(self, ax=None, fig=None, r500_axis=True):
        """
        Arguments:
          * r500_axis: whether to add a second X axis in unit "r500"
        """
        if ax is None:
            fig, ax = plt.subplots(1, 1)
        # noticed data points
        eb = ax.errorbar(self.xdata[self.mask], self.ydata[self.mask],
                xerr=self.xerr[self.mask], yerr=self.yerr[self.mask],
                fmt="none")
        # ignored data points (drawn dash-dotted)
        ignore_mask = np.logical_not(self.mask)
        if np.sum(ignore_mask) > 0:
            eb = ax.errorbar(self.xdata[ignore_mask], self.ydata[ignore_mask],
                    xerr=self.xerr[ignore_mask], yerr=self.yerr[ignore_mask],
                    fmt="none")
            eb[-1][0].set_linestyle("-.")
        # fitted model, evaluated on a log-spaced grid out to the last point
        xmax = self.xdata[-1] + self.xerr[-1]
        xpred = np.power(10, np.linspace(0, np.log10(xmax), 2*len(self.xdata)))
        ypred = self.fitted_model(xpred)
        ymin = min(min(self.ydata), min(ypred))
        ymax = max(max(self.ydata), max(ypred))
        self.model.plot(params=self.fitted.params, xdata=xpred, ax=ax)
        ax.set_xscale("log")
        ax.set_yscale("log")
        ax.set_xlim(1.0, xmax)
        ax.set_ylim(ymin/1.2, ymax*1.2)
        name = self.name
        if self.obsid is not None:
            name += "; %s" % self.obsid
        ax.set_title("Fitted Surface Brightness Profile (%s)" % name)
        ax.set_xlabel("Radius (%s)" % self.xunit)
        ax.set_ylabel(r"Surface Brightness (photons/cm$^2$/pixel$^2$/s)")
        ax.text(x=xmax, y=ymax,
                s="redchi: %.2f / %.2f = %.2f" % (self.fitted.chisqr,
                    self.fitted.nfree, self.fitted.chisqr/self.fitted.nfree),
                horizontalalignment="right", verticalalignment="top")
        plot_ret = [fig, ax]
        if r500_axis:
            # Add a second X-axis with labels in unit "r500"
            # Credit: https://stackoverflow.com/a/28192477/4856091
            try:
                ax.title.set_position([0.5, 1.1])  # raise title position
                ax2 = ax.twiny()
                # NOTE: the ORDER of the following lines MATTERS
                ax2.set_xscale(ax.get_xscale())
                ax2_ticks = ax.get_xticks()
                ax2.set_xticks(ax2_ticks)
                ax2.set_xbound(ax.get_xbound())
                ax2.set_xticklabels([ "%.2g" % self.convert_to_r500(x)
                                      for x in ax2_ticks ])
                ax2.set_xlabel("Radius (r500; r500 = %s pix = %s kpc)" % (\
                        self.r500_pix, self.r500_kpc))
                ax2.grid(False)
                plot_ret.append(ax2)
            except ValueError:
                # cannot convert X values to unit "r500"
                pass
        # automatically adjust layout
        fig.tight_layout()
        return plot_ret
+
+
def make_model(config, modelname):
    """
    Make the model with parameters set according to the config.
    """
    # map the model name to the corresponding model class
    if modelname == "sbeta":
        model = FitModelSBeta()   # single-beta model
    elif modelname == "dbeta":
        model = FitModelDBeta()   # double-beta model
    else:
        raise ValueError("Invalid model: %s" % modelname)
    # each config entry: name = initial, lower, upper[, FIXED/False]
    # (a 4th field of FIXED/False freezes the parameter during fitting)
    for pname, pspec in config[modelname]["params"].items():
        frozen = (len(pspec) == 4 and pspec[3].upper() in ["FIXED", "FALSE"])
        model.set_param(name=pname, value=float(pspec[0]),
                min=float(pspec[1]), max=float(pspec[2]),
                vary=(not frozen))
    return model
+
+
def main():
    """
    Main routine: parse the command line, read the config, fit the SBP
    data, report the results, and save the plot.
    """
    # parser for command line options and arguments
    parser = argparse.ArgumentParser(
            description="Fit surface brightness profile with " + \
                    "single-/double-beta model",
            epilog="Version: %s (%s)" % (__version__, __date__))
    parser.add_argument("-V", "--version", action="version",
            version="%(prog)s " + "%s (%s)" % (__version__, __date__))
    parser.add_argument("config", help="Config file for SBP fitting")
    # exclusive argument group for model selection
    grp_model = parser.add_mutually_exclusive_group(required=False)
    grp_model.add_argument("-s", "--sbeta", dest="sbeta",
            action="store_true", help="single-beta model for SBP")
    grp_model.add_argument("-d", "--dbeta", dest="dbeta",
            action="store_true", help="double-beta model for SBP")
    #
    args = parser.parse_args()

    config = ConfigObj(args.config)

    # determine the model name (command-line flags override the config)
    if args.sbeta:
        modelname = "sbeta"
    elif args.dbeta:
        modelname = "dbeta"
    else:
        modelname = config["model"]

    config_model = config[modelname]
    # determine the "outfile" and "imgfile"
    # (the model section OVERRIDES the top-level options)
    outfile = config.get("outfile")
    outfile = config_model.get("outfile", outfile)
    imgfile = config.get("imgfile")
    imgfile = config_model.get("imgfile", imgfile)

    # SBP fitting model
    model = make_model(config, modelname=modelname)

    # sbp data and fit object
    # (sbpfile columns: radius, radius err, brightness, brightness err)
    sbpdata = np.loadtxt(config["sbpfile"])
    sbpfit = SbpFit(model=model, xdata=sbpdata[:, 0], xerr=sbpdata[:, 1],
            ydata=sbpdata[:, 2], yerr=sbpdata[:, 3],
            xunit=config.get("unit", "pix"))
    sbpfit.set_source(name=config["name"], obsid=config.get("obsid"),
            r500_pix=config.get("r500_pix"), r500_kpc=config.get("r500_kpc"))

    # apply data range ignorance
    if "ignore" in config.keys():
        for ig in config.as_list("ignore"):
            xmin, xmax = map(float, ig.split("-"))
            sbpfit.ignore_data(xmin=xmin, xmax=xmax)
    if "ignore_r500" in config.keys():
        for ig in config.as_list("ignore_r500"):
            xmin, xmax = map(float, ig.split("-"))
            sbpfit.ignore_data(xmin=xmin, xmax=xmax, unit="r500")

    # apply additional data range ignorance specified within model section
    if "ignore" in config_model.keys():
        for ig in config_model.as_list("ignore"):
            xmin, xmax = map(float, ig.split("-"))
            sbpfit.ignore_data(xmin=xmin, xmax=xmax)
    if "ignore_r500" in config_model.keys():
        for ig in config_model.as_list("ignore_r500"):
            xmin, xmax = map(float, ig.split("-"))
            sbpfit.ignore_data(xmin=xmin, xmax=xmax, unit="r500")

    # fit and calculate confidence intervals
    sbpfit.fit()
    sbpfit.calc_ci()
    # report to stdout as well as to the output file
    sbpfit.report()
    with open(outfile, "w") as ofile:
        sbpfit.report(outfile=ofile)

    # make and save a plot (Agg backend: no display needed)
    fig = Figure(figsize=(10, 8))
    canvas = FigureCanvas(fig)
    ax = fig.add_subplot(111)
    sbpfit.plot(ax=ax, fig=fig, r500_axis=True)
    fig.savefig(imgfile, dpi=150)
+
+
+if __name__ == "__main__":
+ main()
+
+# vim: set ts=4 sw=4 tw=0 fenc=utf-8 ft=python: #
diff --git a/update_r4_header.sh b/update_r4_header.sh
new file mode 100755
index 0000000..8840a15
--- /dev/null
+++ b/update_r4_header.sh
@@ -0,0 +1,56 @@
#!/bin/sh
#
# Add/Update the header keywords of our sample products corresponding to
# the "repro-4", by running the `r4_header_update' tool.
# These new keywords are required by new version (>= v4.6) tools such as
# `dmcoords', `mkacisrmf', `mkwarf', etc.
#
# Reference:
# [1] ahelp - r4_header_update
#     http://cxc.harvard.edu/ciao/ahelp/r4_header_update.html
#
# Aaron LI
# Created: 2016-03-03
# Updated: 2016-03-03
#


if [ $# -lt 1 ]; then
    echo "Usage:"
    echo "    `basename $0` <repro_dir_list>"
    echo "    `basename $0` <repro_dir1> ..."
    exit 1
fi

# Build the list of repro dirs: either the given list file, or a
# temporary list file made from the directory arguments.
if [ -f "$1" ]; then
    REPRO_LIST="$1"
elif [ -d "$1" ]; then
    REPRO_LIST="_tmp_repro_$$.list"
    [ -f "${REPRO_LIST}" ] && mv -f "${REPRO_LIST}" "${REPRO_LIST}_bak"
    while [ ! -z "$1" ]; do
        echo "$1" >> "${REPRO_LIST}"
        shift
    done
else
    echo "ERROR: invalid arguments: $1"
    exit 2
fi

INIT_DIR=`pwd -P`
cat "${REPRO_LIST}" | while read repro_dir; do
    # NOTE: skip on a failed cd; otherwise `r4_header_update' would be
    # run against the files of the *previous* directory
    cd "${repro_dir}" || \
        { echo "ERROR: cannot cd to '${repro_dir}'; skipped" >&2; continue; }
    echo "********* `pwd` *********"
    PBKFILE=`ls acisf*_pbk0.fits`
    ASOLLIS=`ls acisf*_asol1.lis`
    # fix asol.lis if necessary: strip the absolute path prefixes
    if grep -q '/' "${ASOLLIS}"; then
        sed -i'_bak' -e 's#^/.*/##' "${ASOLLIS}"
    fi
    for f in `find . -type f \( -iname 'acisf*_repro_evt2.fits' -o -iname 'evt2*.fits' -o -iname 'img*.fits' \)`; do
        echo "* ${f}"
        r4_header_update infile="${f}" pbkfile="${PBKFILE}" asolfile="@${ASOLLIS}"
    done
    cd "${INIT_DIR}"
done