-
Notifications
You must be signed in to change notification settings - Fork 28
Adds a Firefly particle group generator #184
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
base: main
Are you sure you want to change the base?
Changes from all commits
fb43eed
2f45cb8
3e0a476
6f9450c
734b0e5
bdb8a51
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,361 @@ | ||
| """ | ||
| FireflyGenerator class and member functions. | ||
|
|
||
| """ | ||
|
|
||
| import numpy as np | ||
| import os | ||
|
|
||
| from yt.loaders import \ | ||
| load | ||
| from yt.data_objects.data_containers import \ | ||
| YTDataContainer | ||
| from yt.data_objects.static_output import \ | ||
| Dataset | ||
| from yt.funcs import \ | ||
| mylog, \ | ||
| YTArray | ||
| from yt.utilities.exceptions import \ | ||
| YTFieldNotFound | ||
| from yt.utilities.on_demand_imports import \ | ||
| _firefly | ||
|
|
||
| from trident.config import \ | ||
| ion_table_dir, \ | ||
| ion_table_file, \ | ||
| ion_table_filepath | ||
| from trident.ion_balance import \ | ||
| add_ion_number_density_field, \ | ||
| atomic_mass | ||
| from trident.line_database import \ | ||
| LineDatabase | ||
|
|
||
class FireflyGenerator(object):
    """
    Generates Firefly particle groups from Trident rays for interactive
    3D visualization in Firefly.

    A :class:`~trident.FireflyGenerator` is configured with a line database
    (the set of ions that can be turned into number-density fields) and an
    optional ionization table.  Once initialized, pass ``LightRay`` datasets
    (or their filenames) to
    :meth:`~trident.FireflyGenerator.create_particle_group` to build a
    Firefly ``ParticleGroup`` of the ray's cells, and to
    :meth:`~trident.FireflyGenerator.create_particle_group_guideline` to
    build a line of particles tracing the ray's path.

    **Parameters**

    :line_database: string or :class:`~trident.LineDatabase`, optional

        A text file listing the various lines to insert into the line
        database, or a :class:`~trident.LineDatabase` object in memory.
        The line database provides the list of all ions whose number
        density fields can be visualized.  For a text file, it should have
        4 tab-delimited columns of name (e.g. MgII), wavelength in
        angstroms, gamma of transition, and f-value of transition.  See
        example datasets in trident.path/data/line_lists for examples.
        Default: lines.txt

    :ionization_table: hdf5 file, optional

        An HDF5 file used for computing the ionization fraction of the gas
        based on its density, temperature, metallicity, and redshift.  If
        set to None, will use the ion table defined in your Trident config
        file.
        Default: None
    """
    def __init__(self, line_database='lines.txt',
                 ionization_table=None):

        if isinstance(line_database, LineDatabase):
            self.line_database = line_database
        else:
            # instantiate the LineDatabase
            self.line_database = LineDatabase(line_database)

        self.observing_redshift = 0.

        if ionization_table is not None:
            # Figure out where the user-specified file lives.
            # BUGFIX: previously this tested the config-file paths
            # (ion_table_file / ion_table_filepath) and never consulted the
            # user-supplied ``ionization_table`` argument, so a custom table
            # could never be selected.  Test the argument itself, falling
            # back to looking it up inside the configured ion_table_dir.
            if os.path.isfile(ionization_table):
                self.ionization_table = ionization_table
            elif os.path.isfile(os.path.join(ion_table_dir,
                                             ionization_table)):
                self.ionization_table = os.path.join(ion_table_dir,
                                                     ionization_table)
            else:
                raise RuntimeError("ionization_table %s is not found in local "
                                   "directory or in %s" %
                                   (ionization_table.split(os.sep)[-1],
                                    ion_table_dir))
        else:
            self.ionization_table = None

    @staticmethod
    def _parse_ray(ray):
        """
        Resolve *ray* into a ``(Dataset, data container)`` pair.

        Accepts a filename (loaded via ``yt.load``), an already-loaded yt
        ``Dataset`` (uses ``all_data()``), or a ``YTDataContainer``
        (the parent dataset is taken from the container).

        Raises RuntimeError for any other type.
        """
        # NOTE(review): this machinery presumably only makes sense for
        # ray/particle-style datasets -- TODO confirm and raise
        # NotImplementedError for grid-based datasets if so.
        if isinstance(ray, str):
            ray = load(ray)
        if isinstance(ray, Dataset):
            ad = ray.all_data()
        elif isinstance(ray, YTDataContainer):
            ad = ray
            ray = ad.ds
        else:
            raise RuntimeError("Unrecognized ray type.")
        return ray, ad

    def create_particle_group(self, ray,
                              UIname='ray',
                              fields_to_include=None,
                              fields_units=None,
                              coordinate_units="kpc",
                              velocity_units="km/s",
                              ion_density_units="cm**-3",
                              center=None,
                              lines='all',
                              size=0.2,
                              ):
        """
        Create a Firefly ``ParticleGroup`` from the cells of a ray.

        Ion number-density fields for the requested *lines* are created on
        the ray if not already present, log10'd, and attached to the
        particle group, along with any extra *fields_to_include*.

        **Parameters**

        :ray: string, yt Dataset, or YTDataContainer

            The ray to visualize; filenames are loaded with ``yt.load``.

        :UIname: string, optional

            Label for this particle group in the Firefly UI.
            Default: 'ray'

        :fields_to_include: list of strings, optional

            Additional ('grid', field) fields to attach.  Default: None

        :fields_units: list of strings, optional

            Units for each entry of *fields_to_include*; wrap a unit in
            ``log(...)`` to store log10 of the field.  Must be the same
            length as *fields_to_include*.  Default: None

        :coordinate_units: string, optional

            Units for cell coordinates.  Default: 'kpc'

        :velocity_units: string, optional

            Units for cell velocities.  Default: 'km/s'

        :ion_density_units: string, optional

            Units for ion number densities (log10 is stored).
            Default: 'cm**-3'

        :center: YTArray, optional

            If provided, subtracted from all coordinates.  Default: None

        :lines: list of strings, optional

            Line/ion subset passed to the line database ('all' for every
            ion in the database).  Default: 'all'

        :size: float, optional

            Initial Firefly ``sizeMult`` for the group.  Default: 0.2

        **Returns**

        A ``firefly.data_reader.ParticleGroup``.
        """
        ray, ad = self._parse_ray(ray)

        # temporary fix for yt-4.0 ytdata selection issue
        ray.domain_left_edge = ray.domain_left_edge.to('code_length')
        ray.domain_right_edge = ray.domain_right_edge.to('code_length')

        # Get coordinates and velocities as (n_cells, 3) arrays carrying
        # the requested units.
        coordinates = np.array([
            ad['grid', x_i].in_units(coordinate_units)
            for x_i in ['x', 'y', 'z']
        ]).transpose() * ray.quan(1, coordinate_units)
        if center is not None:
            coordinates -= center
        velocities = np.array([
            ad['grid', 'relative_velocity_' + x_i].in_units(velocity_units)
            for x_i in ['x', 'y', 'z']
        ]).transpose() * ray.quan(1, velocity_units)

        active_ions = self.line_database.parse_subset_to_ions(lines)

        # For storing fields
        field_arrays = []
        field_names = []

        # Make sure we've produced all the necessary
        # derived fields if they aren't native to the data
        for atom, my_lev in active_ions:
            # try to determine if field for line is present in dataset;
            # if successful, the field is in ds.derived_field_list,
            # otherwise we need to add the field to the dataset
            fname = '{}_p{}_number_density'.format(atom, my_lev - 1)
            try:
                ad[fname]
            except YTFieldNotFound:
                mylog.info("Creating %s from ray's fields." % (fname))
                add_ion_number_density_field(
                    atom, my_lev, ray,
                    ionization_table=self.ionization_table)

            field_arrays.append(np.log10(ad[fname].to(ion_density_units)))

            # BUGFIX: previously the UI name was built from ``lines[i]``
            # while iterating ions -- with the default ``lines='all'`` that
            # indexes into the string 'all' (wrong labels, then IndexError
            # beyond three ions).  Build the label from the ion itself.
            field_names.append('log{}_p{}density'.format(atom, my_lev - 1))

        ## handle default arguments
        if fields_to_include is None:
            fields_to_include = []
        if fields_units is None:
            fields_units = []

        ## handle input validation, if any
        if len(fields_units) != len(fields_to_include):
            raise RuntimeError("Each requested field must have units.")

        ## explicitly go after the fields we want
        unavailable_fields = []
        for field, units in zip(fields_to_include, fields_units):
            ## determine if you want to take the log of the field for Firefly
            log_flag = "log(" in units

            ## read the field array from the dataset
            try:
                this_field_array = ad['grid', field]
            except YTFieldNotFound:
                unavailable_fields.append(field)
                continue

            ## fix the units string and prepend 'log' to the field for
            ## the UI name
            if log_flag:
                units = units[len("log("): -1]
                field = f"log{field}"

            ## perform the unit conversion and take the log if
            ## necessary.
            # BUGFIX: ``in_units`` returns a converted copy; the original
            # discarded it, so the requested units were never applied.
            this_field_array = this_field_array.in_units(units)
            if log_flag:
                this_field_array = np.log10(this_field_array)

            ## add this array to the tracked arrays
            field_arrays.append(this_field_array)
            field_names.append(field)

        # Report fields skipped because they were unavailable
        if len(unavailable_fields) > 0:
            mylog.warning(
                'For ray, unable to retrieve these fields: %s',
                unavailable_fields)

        ## create a firefly ParticleGroup for this particle type,
        ## including fields only if any were collected
        if len(field_arrays) != 0:
            ParticleGroup_kwargs = {
                'field_arrays': field_arrays,
                'field_names': field_names,
            }
        else:
            ParticleGroup_kwargs = {}
        pg = _firefly.data_reader.ParticleGroup(
            UIname=UIname,
            coordinates=coordinates,
            velocities=velocities,
            **ParticleGroup_kwargs
        )

        pg.settings_default['sizeMult'] = size

        return pg

    def create_particle_group_guideline(self, ray,
                                        coordinate_units="kpc",
                                        center=None,
                                        dx=1.,
                                        UIname='guideline',
                                        size=0.3,
                                        color=None
                                        ):
        """
        Create a Firefly ``ParticleGroup`` of evenly spaced particles
        tracing the straight-line path of the ray, as a visual guide.

        **Parameters**

        :ray: string, yt Dataset, or YTDataContainer

            The ray whose path to trace; filenames are loaded with
            ``yt.load``.

        :coordinate_units: string, optional

            Units for the guideline coordinates.  Default: 'kpc'

        :center: YTArray, optional

            If provided, subtracted from the ray start/end points.
            Default: None

        :dx: float, optional

            Spacing between guideline particles, in *coordinate_units*.
            Default: 1.

        :UIname: string, optional

            Label for this particle group in the Firefly UI.
            Default: 'guideline'

        :size: float, optional

            Initial Firefly ``sizeMult`` for the group.  Default: 0.3

        :color: array-like of 4 floats, optional

            RGBA color for the guideline.  Defaults to opaque white
            ([1., 1., 1., 1.]).  (A None sentinel replaces the previous
            mutable ``np.array`` default argument.)

        **Returns**

        A ``firefly.data_reader.ParticleGroup``.
        """
        if color is None:
            color = np.array([1., 1., 1., 1.])

        ray, ad = self._parse_ray(ray)

        start = ray.light_ray_solution[0]['start'].to(coordinate_units)
        end = ray.light_ray_solution[0]['end'].to(coordinate_units)

        if center is not None:
            start -= center
            end -= center

        # Lay particles every dx along the unit vector from start to end.
        guidevec = end - start
        pathlength = np.linalg.norm(guidevec)
        coordinates = (
            np.arange(0, pathlength + dx, dx)[:, np.newaxis]
            * (guidevec / pathlength)
        )
        coordinates += start

        pg = _firefly.data_reader.ParticleGroup(
            UIname=UIname,
            coordinates=coordinates,
        )

        pg.settings_default['sizeMult'] = size
        # (a duplicated 'color' assignment was removed here)
        pg.settings_default['color'] = color
        pg.settings_default['blendingMode'] = 'none'
        pg.settings_default['depthTest'] = True

        return pg
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -245,7 +245,8 @@ def add_ion_fields(ds, ions, ftype='gas', | |
| # If line_database is set, then use the underlying file as the line list | ||
| # to select ions from. | ||
| if line_database is not None: | ||
| line_database = LineDatabase(line_database) | ||
|
Collaborator
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. I believe this is leftover in your stack from previous changes you've made on LineTools. Is it possible to strip this out so we just focus on the Firefly functionality? |
||
| if not isinstance( line_database, LineDatabase ): | ||
| line_database = LineDatabase(line_database) | ||
| ion_list = line_database.parse_subset_to_ions(ions) | ||
|
|
||
| # Otherwise, any ion can be selected (not just ones in the line list). | ||
|
|
||
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
I believe this is leftover in your stack from previous changes you've made on LineTools. Is it possible to strip this out so we just focus on the Firefly functionality?