| author    | Aaron LI <aly@aaronly.me>                                | 2017-10-19 10:09:57 +0800 |
| --------- | -------------------------------------------------------- | ------------------------- |
| committer | Aaron LI <aly@aaronly.me>                                | 2017-10-19 10:09:57 +0800 |
| commit    | e8d45dd2e8365171a9bc6b7cd7c7d6623fa5e013 (patch)         |                           |
| tree      | c5eac5f3806f17929358691ce441432ddc93c10a /fg21sim        |                           |
| parent    | e8e7198c67b4c0e62cd419b1ebb8b300a853057d (diff)          |                           |
| download  | fg21sim-e8d45dd2e8365171a9bc6b7cd7c7d6623fa5e013.tar.bz2 |                           |
clusters/halo: Rewrite halos catalog and raw data save
Replace option "halos_dumpfile" with "halos_catalog_outfile", and use
option "dump_halos_data" to control whether to dump the raw data.
Diffstat (limited to 'fg21sim')
| mode       | file                                       | lines |
| ---------- | ------------------------------------------ | ----- |
| -rw-r--r-- | fg21sim/configs/20-extragalactic.conf.spec | 14    |
| -rw-r--r-- | fg21sim/extragalactic/clusters/main.py     | 54    |
2 files changed, 50 insertions, 18 deletions
```diff
diff --git a/fg21sim/configs/20-extragalactic.conf.spec b/fg21sim/configs/20-extragalactic.conf.spec
index 425df88..36d14c9 100644
--- a/fg21sim/configs/20-extragalactic.conf.spec
+++ b/fg21sim/configs/20-extragalactic.conf.spec
@@ -74,9 +74,17 @@
   # to generate images at all frequencies.
   use_output_catalog = boolean(default=False)
 
-  # Output file for dumping the simulated cluster halos data in Python
-  # native *pickle* format (i.e., .pkl)
-  halos_dumpfile = string(default=None)
+  # Output CSV file of the halos catalog containing the calculated
+  # properties of the simulated halos.
+  halos_catalog_outfile = string(default=None)
+
+  # Whether to dump the whole data of the simulated halos in Python
+  # native pickle format (i.e., ".pkl") to a file with the same basename
+  # as the above ``halos_catalog_outfile``?
+  # The dumped data also includes the derived electron spectrum for
+  # each halo, therefore this file can be reloaded back in order to
+  # calculate the emissions at other frequencies.
+  dump_halos_data = boolean(default=True)
 
   # The minimum mass for clusters when to determine the galaxy clusters
   # total counts and their distributions.
diff --git a/fg21sim/extragalactic/clusters/main.py b/fg21sim/extragalactic/clusters/main.py
index 037b983..0c3483d 100644
--- a/fg21sim/extragalactic/clusters/main.py
+++ b/fg21sim/extragalactic/clusters/main.py
@@ -76,7 +76,9 @@ class GalaxyClusters:
         comp = self.compID
         self.catalog_outfile = self.configs.get_path(comp+"/catalog_outfile")
         self.use_output_catalog = self.configs.getn(comp+"/use_output_catalog")
-        self.halos_dumpfile = self.configs.get_path(comp+"/halos_dumpfile")
+        self.halos_catalog_outfile = self.configs.get_path(
+            comp+"/halos_catalog_outfile")
+        self.dump_halos_data = self.configs.getn(comp+"/dump_halos_data")
         self.prefix = self.configs.getn(comp+"/prefix")
         self.output_dir = self.configs.get_path(comp+"/output_dir")
         self.merger_mass_min = self.configs.getn(comp+"/merger_mass_min")
@@ -280,16 +282,34 @@ class GalaxyClusters:
                 ("Tb_mean", Tb_mean),  # [K]
             ])
             self.halos.append(data)
-
         logger.info("Simulated radio halos for merging cluster.")
 
+    def _save_halos_catalog(self, outfile=None):
+        """
+        Convert the halos data (``self.halos``) into a Pandas DataFrame
+        and write into a CSV file.
+        """
+        if outfile is None:
+            outfile = self.halos_catalog_outfile
+        logger.info("Converting halos data to be a Pandas DataFrame ...")
         keys = list(self.halos[0].keys())
         # Ignore the ``gamma`` and ``n_e`` items
         for k in ["gamma", "n_e"]:
             keys.remove(k)
 
-        self.halos_df = dictlist_to_dataframe(self.halos, keys=keys)
-        logger.info("Done halos data conversion.")
+        halos_df = dictlist_to_dataframe(self.halos, keys=keys)
+        dataframe_to_csv(halos_df, outfile, clobber=self.clobber)
+        logger.info("Saved DataFrame of halos data to file: %s" % outfile)
+
+    def _dump_halos_data(self, outfile=None):
+        """
+        Dump the simulated halos data into Python native pickle format,
+        making it possible to load the data back to quickly calculate
+        the emissions at additional frequencies.
+        """
+        if outfile is None:
+            outfile = os.path.splitext(self.halos_catalog_outfile)[0] + ".pkl"
+        pickle_dump(self.halos, outfile=outfile, clobber=self.clobber)
 
     def _draw_halos(self):
         """
@@ -438,14 +458,18 @@ class GalaxyClusters:
             dataframe_to_csv(self.catalog, outfile=self.catalog_outfile,
                              comment=self.catalog_comment,
                              clobber=self.clobber)
-        # Dump the simulated clusters data
-        logger.info("Dumping the simulated halos data ...")
-        if self.halos_dumpfile is None:
-            logger.warning("Missing dump outfile; skip dump cluster data!")
-        else:
-            pickle_dump(self.halos, outfile=self.halos_dumpfile,
-                        clobber=self.clobber)
-            # Also save converted DataFrame of halos data
-            outfile = os.path.splitext(self.halos_dumpfile)[0] + ".csv"
-            dataframe_to_csv(self.halos_df, outfile, clobber=self.clobber)
-            logger.info("Saved DataFrame of halos data to file: %s" % outfile)
+
+        # Save the simulated halos catalog and raw data
+        logger.info("Saving the simulated halos catalog and raw data ...")
+        if self.use_dump_halos_data:
+            filepath = self.halos_catalog_outfile
+            os.rename(filepath, filepath+".old")
+            logger.warning("Backed up halos catalog: %s -> %s" %
+                           (filepath, filepath+".old"))
+            filepath = os.path.splitext(self.halos_catalog_outfile)[0]+".pkl"
+            os.rename(filepath, filepath+".old")
+            logger.warning("Backed up halos data dump file: %s -> %s" %
+                           (filepath, filepath+".old"))
+        self._save_halos_catalog()
+        if self.dump_halos_data:
+            self._dump_halos_data()
```