Commits

Sam Leitner committed 12993fc Merge

pulling from tip

  • Parent commits b02cf1a, 1bab987

Files changed (126)

File .hgchurn

+stephenskory@yahoo.com = s@skory.us
+"Stephen Skory stephenskory@yahoo.com" = s@skory.us
+yuan@astro.columbia.edu = bear0980@gmail.com
+juxtaposicion@gmail.com = cemoody@ucsc.edu
+chummels@gmail.com = chummels@astro.columbia.edu
+jwise@astro.princeton.edu = jwise@physics.gatech.edu
+atmyers = atmyers@berkeley.edu
+sam.skillman@gmail.com = samskillman@gmail.com
+casey@thestarkeffect.com = caseywstark@gmail.com
+chiffre = chiffre@posteo.de
+Christian Karch = chiffre@posteo.de

File .hgignore

 freetype.cfg
 hdf5.cfg
 png.cfg
+yt_updater.log
 yt/frontends/ramses/_ramses_reader.cpp
 yt/utilities/amr_utils.c
 yt/utilities/kdtree/forthonf2c.h
 yt/utilities/lib/RayIntegrators.c
 yt/utilities/lib/VolumeIntegrator.c
 yt/utilities/lib/grid_traversal.c
+yt/utilities/lib/GridTree.c
 yt/utilities/lib/marching_cubes.c
 yt/utilities/lib/png_writer.h
 syntax: glob

File .hgtags

File contents unchanged.

File distribute_setup.py

 This file can also be run as a script to install or upgrade setuptools.
 """
 import os
+import shutil
 import sys
 import time
 import fnmatch
 import tempfile
 import tarfile
+import optparse
+
 from distutils import log
 
 try:
             args = [quote(arg) for arg in args]
         return os.spawnl(os.P_WAIT, sys.executable, *args) == 0
 
-DEFAULT_VERSION = "0.6.21"
+DEFAULT_VERSION = "0.6.32"
 DEFAULT_URL = "http://pypi.python.org/packages/source/d/distribute/"
 SETUPTOOLS_FAKED_VERSION = "0.6c11"
 
 """ % SETUPTOOLS_FAKED_VERSION
 
 
-def _install(tarball):
+def _install(tarball, install_args=()):
     # extracting the tarball
     tmpdir = tempfile.mkdtemp()
     log.warn('Extracting in %s', tmpdir)
 
         # installing
         log.warn('Installing Distribute')
-        if not _python_cmd('setup.py', 'install'):
+        if not _python_cmd('setup.py', 'install', *install_args):
             log.warn('Something went wrong during the installation.')
             log.warn('See the error message above.')
+            # exitcode will be 2
+            return 2
     finally:
         os.chdir(old_wd)
+        shutil.rmtree(tmpdir)
 
 
 def _build_egg(egg, tarball, to_dir):
 
     finally:
         os.chdir(old_wd)
+        shutil.rmtree(tmpdir)
     # returning the result
     log.warn(egg)
     if not os.path.exists(egg):
         except ImportError:
             return _do_download(version, download_base, to_dir, download_delay)
         try:
-            pkg_resources.require("distribute>="+version)
+            pkg_resources.require("distribute>=" + version)
             return
         except pkg_resources.VersionConflict:
             e = sys.exc_info()[1]
         if not no_fake:
             _create_fake_setuptools_pkg_info(to_dir)
 
+
 def download_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL,
                         to_dir=os.curdir, delay=15):
     """Download distribute from a specified location and return its filename
                 dst.close()
     return os.path.realpath(saveto)
 
+
 def _no_sandbox(function):
     def __no_sandbox(*args, **kw):
         try:
 
     return __no_sandbox
 
+
 def _patch_file(path, content):
     """Will backup the file then patch it"""
     existing_content = open(path).read()
 
 _patch_file = _no_sandbox(_patch_file)
 
+
 def _same_content(path, content):
     return open(path).read() == content
 
+
 def _rename_path(path):
     new_name = path + '.OLD.%s' % time.time()
-    log.warn('Renaming %s into %s', path, new_name)
+    log.warn('Renaming %s to %s', path, new_name)
     os.rename(path, new_name)
     return new_name
 
+
 def _remove_flat_installation(placeholder):
     if not os.path.isdir(placeholder):
         log.warn('Unknown installation at %s', placeholder)
         log.warn('Could not locate setuptools*.egg-info')
         return
 
-    log.warn('Removing elements out of the way...')
+    log.warn('Moving elements out of the way...')
     pkg_info = os.path.join(placeholder, file)
     if os.path.isdir(pkg_info):
         patched = _patch_egg_dir(pkg_info)
 
 _remove_flat_installation = _no_sandbox(_remove_flat_installation)
 
+
 def _after_install(dist):
     log.warn('After install bootstrap.')
     placeholder = dist.get_command_obj('install').install_purelib
     _create_fake_setuptools_pkg_info(placeholder)
 
+
 def _create_fake_setuptools_pkg_info(placeholder):
     if not placeholder or not os.path.exists(placeholder):
         log.warn('Could not find the install location')
         return
 
     log.warn('Creating %s', pkg_info)
-    f = open(pkg_info, 'w')
+    try:
+        f = open(pkg_info, 'w')
+    except EnvironmentError:
+        log.warn("Don't have permissions to write %s, skipping", pkg_info)
+        return
     try:
         f.write(SETUPTOOLS_PKG_INFO)
     finally:
     finally:
         f.close()
 
-_create_fake_setuptools_pkg_info = _no_sandbox(_create_fake_setuptools_pkg_info)
+_create_fake_setuptools_pkg_info = _no_sandbox(
+    _create_fake_setuptools_pkg_info
+)
+
 
 def _patch_egg_dir(path):
     # let's check if it's already patched
 
 _patch_egg_dir = _no_sandbox(_patch_egg_dir)
 
+
 def _before_install():
     log.warn('Before install bootstrap.')
     _fake_setuptools()
 def _under_prefix(location):
     if 'install' not in sys.argv:
         return True
-    args = sys.argv[sys.argv.index('install')+1:]
+    args = sys.argv[sys.argv.index('install') + 1:]
     for index, arg in enumerate(args):
         for option in ('--root', '--prefix'):
             if arg.startswith('%s=' % option):
                 return location.startswith(top_dir)
             elif arg == option:
                 if len(args) > index:
-                    top_dir = args[index+1]
+                    top_dir = args[index + 1]
                     return location.startswith(top_dir)
         if arg == '--user' and USER_SITE is not None:
             return location.startswith(USER_SITE)
         return
     ws = pkg_resources.working_set
     try:
-        setuptools_dist = ws.find(pkg_resources.Requirement.parse('setuptools',
-                                  replacement=False))
+        setuptools_dist = ws.find(
+            pkg_resources.Requirement.parse('setuptools', replacement=False)
+            )
     except TypeError:
         # old distribute API
-        setuptools_dist = ws.find(pkg_resources.Requirement.parse('setuptools'))
+        setuptools_dist = ws.find(
+            pkg_resources.Requirement.parse('setuptools')
+        )
 
     if setuptools_dist is None:
         log.warn('No setuptools distribution found')
         res = _patch_egg_dir(setuptools_location)
         if not res:
             return
-    log.warn('Patched done.')
+    log.warn('Patching complete.')
     _relaunch()
 
 
     log.warn('Relaunching...')
     # we have to relaunch the process
     # pip marker to avoid a relaunch bug
-    if sys.argv[:3] == ['-c', 'install', '--single-version-externally-managed']:
+    _cmd1 = ['-c', 'install', '--single-version-externally-managed']
+    _cmd2 = ['-c', 'install', '--record']
+    if sys.argv[:3] == _cmd1 or sys.argv[:3] == _cmd2:
         sys.argv[0] = 'setup.py'
     args = [sys.executable] + sys.argv
     sys.exit(subprocess.call(args))
             # Extract directories with a safe mode.
             directories.append(tarinfo)
             tarinfo = copy.copy(tarinfo)
-            tarinfo.mode = 448 # decimal for oct 0700
+            tarinfo.mode = 448  # decimal for oct 0700
         self.extract(tarinfo, path)
 
     # Reverse sort directories.
                 self._dbg(1, "tarfile: %s" % e)
 
 
-def main(argv, version=DEFAULT_VERSION):
+def _build_install_args(options):
+    """
+    Build the arguments to 'python setup.py install' on the distribute package
+    """
+    install_args = []
+    if options.user_install:
+        if sys.version_info < (2, 6):
+            log.warn("--user requires Python 2.6 or later")
+            raise SystemExit(1)
+        install_args.append('--user')
+    return install_args
+
+def _parse_args():
+    """
+    Parse the command line for options
+    """
+    parser = optparse.OptionParser()
+    parser.add_option(
+        '--user', dest='user_install', action='store_true', default=False,
+        help='install in user site package (requires Python 2.6 or later)')
+    parser.add_option(
+        '--download-base', dest='download_base', metavar="URL",
+        default=DEFAULT_URL,
+        help='alternative URL from where to download the distribute package')
+    options, args = parser.parse_args()
+    # positional arguments are ignored
+    return options
+
+def main(version=DEFAULT_VERSION):
     """Install or upgrade setuptools and EasyInstall"""
-    tarball = download_setuptools()
-    _install(tarball)
-
+    options = _parse_args()
+    tarball = download_setuptools(download_base=options.download_base)
+    return _install(tarball, _build_install_args(options))
 
 if __name__ == '__main__':
-    main(sys.argv[1:])
+    sys.exit(main())
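
For context, a minimal usage sketch of the bootstrap script as revised above; the mirror URL in the comment is illustrative, not something this commit references:

    # Programmatic use, as yt's own setup.py does (see "File setup.py" below):
    import distribute_setup
    distribute_setup.use_setuptools()

    # The options added in this revision can also be passed on the command line:
    #   python distribute_setup.py --user
    #   python distribute_setup.py --download-base=http://example.org/distribute-mirror/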

File doc/activate

File contents unchanged.

File doc/activate.csh

File contents unchanged.

File doc/install_script.sh

 
 DEST_SUFFIX="yt-`uname -m`"
 DEST_DIR="`pwd`/${DEST_SUFFIX/ /}"   # Installation location
-BRANCH="yt" # This is the branch to which we will forcibly update.
+BRANCH="yt-3.0" # This is the branch to which we will forcibly update.
 
 # Here's where you put the HDF5 path if you like; otherwise it'll download it
 # and install it on its own
 INST_PYX=0      # Install PyX?  Sometimes PyX can be problematic without a
                 # working TeX installation.
 INST_0MQ=1      # Install 0mq (for IPython) and affiliated bindings?
+INST_ROCKSTAR=0 # Install the Rockstar halo finder?
 
 # If you've got YT some other place, set this to point to it.
 YT_DIR=""
     ( ${SHASUM} -c $1.sha512 2>&1 ) 1>> ${LOG_FILE} || do_exit
 }
 
+function get_ytdata
+{
+    echo "Downloading $1 from yt-project.org"
+    [ -e $1 ] && return
+    ${GETFILE} "http://yt-project.org/data/$1" || do_exit
+    ( ${SHASUM} -c $1.sha512 2>&1 ) 1>> ${LOG_FILE} || do_exit
+}
+
 ORIG_PWD=`pwd`
 
 if [ -z "${DEST_DIR}" ]
     exit 1
 fi
 
+# Get supplemental data.
+
+mkdir -p ${DEST_DIR}/data
+cd ${DEST_DIR}/data
+echo 'de6d8c6ea849f0206d219303329a0276b3cce7c051eec34377d42aacbe0a4f47ac5145eb08966a338ecddd2b83c8f787ca9956508ad5c39ee2088ad875166410  xray_emissivity.h5' > xray_emissivity.h5.sha512
+get_ytdata xray_emissivity.h5
+
 mkdir -p ${DEST_DIR}/src
 cd ${DEST_DIR}/src
 
 echo 'c13116c1f0547000cc565e15774687b9e884f8b74fb62a84e578408a868a84961704839065ae4f21b662e87f2aaedf6ea424ea58dfa9d3d73c06281f806d15dd  nose-1.2.1.tar.gz' > nose-1.2.1.tar.gz.sha512
 echo '73de2c99406a38f85273931597525cec4ebef55b93712adca3b0bfea8ca3fc99446e5d6495817e9ad55cf4d48feb7fb49734675c4cc8938db8d4a5225d30eca7  python-hglib-0.2.tar.gz' > python-hglib-0.2.tar.gz.sha512
 echo 'ffc602eb346717286b3d0a6770c60b03b578b3cf70ebd12f9e8b1c8c39cdb12ef219ddaa041d7929351a6b02dbb8caf1821b5452d95aae95034cbf4bc9904a7a  sympy-0.7.2.tar.gz' > sympy-0.7.2.tar.gz.sha512
-
+echo '172f2bc671145ebb0add2669c117863db35851fb3bdb192006cd710d4d038e0037497eb39a6d01091cb923f71a7e8982a77b6e80bf71d6275d5d83a363c8d7e5  rockstar-0.99.6.tar.gz' > rockstar-0.99.6.tar.gz.sha512
 # Individual processes
 [ -z "$HDF5_DIR" ] && get_ytproject hdf5-1.8.9.tar.gz
 [ $INST_ZLIB -eq 1 ] && get_ytproject zlib-1.2.3.tar.bz2 
 get_ytproject nose-1.2.1.tar.gz 
 get_ytproject python-hglib-0.2.tar.gz
 get_ytproject sympy-0.7.2.tar.gz
+get_ytproject rockstar-0.99.6.tar.gz
 if [ $INST_BZLIB -eq 1 ]
 then
     if [ ! -e bzip2-1.0.5/done ]
 do_setup_py sympy-0.7.2
 [ $INST_PYX -eq 1 ] && do_setup_py PyX-0.11.1
 
+# Now we build Rockstar and set its environment variable.
+if [ $INST_ROCKSTAR -eq 1 ]
+then
+    if [ ! -e Rockstar/done ]
+    then
+        [ ! -e Rockstar ] && tar xfz rockstar-0.99.6.tar.gz
+        echo "Building Rockstar"
+        cd Rockstar
+        ( make lib 2>&1 ) 1>> ${LOG_FILE} || do_exit
+        cp librockstar.so ${DEST_DIR}/lib
+        ROCKSTAR_DIR=${DEST_DIR}/src/Rockstar
+        echo $ROCKSTAR_DIR > ${YT_DIR}/rockstar.cfg
+        touch done
+        cd ..
+    fi
+fi
+
 echo "Doing yt update, wiping local changes and updating to branch ${BRANCH}"
 MY_PWD=`pwd`
 cd $YT_DIR
 then
     echo "Cloning a copy of Enzo."
     cd ${DEST_DIR}/src/
-    ${HG_EXEC} clone https://enzo.googlecode.com/hg/ ./enzo-hg-stable
+    ${HG_EXEC} clone https://bitbucket.org/enzo/enzo-stable ./enzo-hg-stable
     cd $MY_PWD
 fi
 

File setup.cfg

File contents unchanged.

File setup.py

 import distribute_setup
 distribute_setup.use_setuptools()
 
+from distutils.command.build_py import build_py
 from numpy.distutils.misc_util import appendpath
 from numpy.distutils import log
 from distutils import version
 
 if os.path.exists('MANIFEST'): os.remove('MANIFEST')
 
+def get_mercurial_changeset_id(target_dir):
+    """adapted from a script by Jason F. Harris, published at
+
+    http://jasonfharris.com/blog/2010/05/versioning-your-application-with-the-mercurial-changeset-hash/
+
+    """
+    import subprocess
+    import re
+    get_changeset = subprocess.Popen('hg identify -b -i',
+                                     stdout=subprocess.PIPE,
+                                     stderr=subprocess.PIPE,
+                                     shell=True)
+        
+    if (get_changeset.stderr.read() != ""):
+        print "Error in obtaining current changeset of the Mercurial repository"
+        return None
+
+    changeset = get_changeset.stdout.read().strip()
+    if (not re.search("^[0-9a-f]{12}", changeset)):
+        print "Current changeset of the Mercurial repository is malformed"
+        changeset = None
+
+    return changeset
+
+class my_build_py(build_py):
+    def run(self):
+        # honor the --dry-run flag
+        if not self.dry_run:
+            target_dir = os.path.join(self.build_lib, 'yt')
+            src_dir = os.getcwd()
+            changeset = get_mercurial_changeset_id(src_dir)
+            self.mkpath(target_dir)
+            with open(os.path.join(target_dir, '__hg_version__.py'), 'w') as fobj:
+                fobj.write("hg_version = '%s'\n" % changeset)
+
+            build_py.run(self)
 
 def configuration(parent_package='', top_path=None):
     from numpy.distutils.misc_util import Configuration
         configuration=configuration,
         zip_safe=False,
         data_files=REASON_FILES,
+        cmdclass = {'build_py': my_build_py},
         )
     return
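
As a hedged aside (not part of this commit): once my_build_py has written yt/__hg_version__.py into the build tree, the recorded changeset could be read back at runtime with a guarded import such as the sketch below; the fallback is an assumption for source checkouts where build_py has not run.

    # Hypothetical consumer of the file written by my_build_py above;
    # 'hg_version' is the attribute name that my_build_py writes.
    try:
        from yt.__hg_version__ import hg_version
    except ImportError:
        hg_version = None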
 

File yt/__init__.py

   along with this program.  If not, see <http://www.gnu.org/licenses/>.
 """
 
-__version__ = "2.5-dev"
+__version__ = "3.0-dev"

File yt/analysis_modules/halo_finding/api.py

     FOFHaloFinder, \
     HaloFinder, \
     LoadHaloes, \
-    LoadTextHaloes
+    LoadTextHalos, \
+    LoadTextHaloes, \
+    RockstarHalo, \
+    RockstarHaloList, \
+    LoadRockstarHalos

File yt/analysis_modules/halo_finding/halo_objects.py

 import numpy as np
 import random
 import sys
+import glob
+import os
 import os.path as path
 from collections import defaultdict
 
 
 TINY = 1.e-40
 
-# Ellipsoid funtions.
-# Rotation Matrixes should already be imported at top
-
 class Halo(object):
     """
     A data source that returns particle information about the members of a
     def __init__(self, halo_list, id, indices=None, size=None, CoM=None,
         max_dens_point=None, group_total_mass=None, max_radius=None,
         bulk_vel=None, tasks=None, rms_vel=None, supp=None):
+        self.halo_list = halo_list
         self._max_dens = halo_list._max_dens
         self.id = id
         self.data = halo_list._data_source
             self.supp = {}
         else:
             self.supp = supp
+        self._saved_fields = {}
+        self._ds_sort = None
+        self._particle_mask = None
+
+    @property
+    def particle_mask(self):
+        # Dynamically create the masking array for particles, and get
+        # the data using standard yt methods.
+        if self._particle_mask is not None:
+            return self._particle_mask
+        # This is from disk.
+        pid = self.__getitem__('particle_index')
+        # This is from the sphere.
+        if self._name == "RockstarHalo":
+            ds = self.pf.h.sphere(self.CoM, self._radjust * self.max_radius)
+        elif self._name == "LoadedHalo":
+            ds = self.pf.h.sphere(self.CoM, self._radjust * self.max_radius)
+        sp_pid = ds['particle_index']
+        self._ds_sort = sp_pid.argsort()
+        sp_pid = sp_pid[self._ds_sort]
+        # This matches them up.
+        self._particle_mask = np.in1d(sp_pid, pid)
+        return self._particle_mask
 
     def center_of_mass(self):
         r"""Calculate and return the center of mass.
             e0_vector[2], tilt)
 
 class RockstarHalo(Halo):
-    def __init__(self,halo_list,index,ID, DescID, Mvir, Vmax, Vrms, Rvir, Rs, Np, 
-                  X, Y, Z, VX, VY, VZ, JX, JY, JZ, Spin):
-        """Implement the properties reported by Rockstar: ID, Descendant ID,
-           Mvir, Vmax, Vrms, Rvir, Rs, Np, XYZ, VXYZ, JXYZ, and spin.
-           Most defaults are removed since we don't read in which halos
-           particles belong to. 
-        """
-        #we can still use get_sphere!
-        self.ID = ID #from rockstar
-        self.id = index #index in the halo list
-        self.pf = halo_list.pf
-
-        self.DescID = DescID
-        self.Mvir = Mvir
-        self.Vmax = Vmax
-        self.Vrms = Vrms
-        self.Rvir = Rvir
-        self.Rs   = Rs
-        self.Np   = Np
-        self.X    = X
-        self.Y    = Y
-        self.Z    = Z
-        self.VX   = VX
-        self.VY   = VY
-        self.VZ   = VZ
-        self.JX   = JX
-        self.JY   = JY
-        self.JZ   = JZ
-        self.Spin = Spin
-
-        #Halo.__init__(self,halo_list,index,
-        self.size=Np 
-        self.CoM=np.array([X,Y,Z])
-        self.max_dens_point=-1
-        self.group_total_mass=-1
-        self.max_radius=Rvir
-        self.bulk_vel=np.array([VX,VY,VZ])*1e5
-        self.rms_vel=-1
-        self.group_total_mass = -1 #not implemented 
+    _name = "RockstarHalo"
+    # See particle_mask
+    _radjust = 4.
     
     def maximum_density(self):
         r"""Not implemented."""
         r"""Not implemented."""
         return self.center_of_mass()
 
-    def total_mass(self):
-        r"""Not implemented."""
-        return -1
-
-    def get_size(self):
-        r"""Return the number of particles belonging to the halo."""
-        return self.Np
-
     def write_particle_list(self,handle):
         r"""Not implemented."""
         return -1
 
     def virial_mass(self):
         r"""Virial mass in Msun/h"""
-        return self.Mvir
+        return self.supp['m']
 
     def virial_radius(self):
         r"""Virial radius in Mpc/h comoving"""
-        return self.Rvir
+        return self.supp['r']
 
-    def virial_bin(self):
-        r"""Not implemented"""
-        return -1
+    def __getitem__(self, key):
+        # This function will try to get particle data in one of three ways,
+        # in descending preference.
+        # 1. From saved_fields, e.g. we've already got it.
+        # 2. From the halo binary files off disk.
+        # 3. Use the unique particle indexes of the halo to select a missing
+        # field from an AMR Sphere.
+        if key in self._saved_fields:
+            # We've already got it.
+            return self._saved_fields[key]
+        # Gotta go get it from the Rockstar binary file.
+        if key == 'particle_index':
+            IDs = self._get_particle_data(self.supp['id'],
+                self.halo_list.halo_to_fname, self.size, key)
+            IDs = IDs[IDs.argsort()]
+            self._saved_fields[key] = IDs
+            return self._saved_fields[key]
+        # We won't store this field below in saved_fields because
+        # that would mean keeping two copies of it, one in the yt
+        # machinery and one here.
+        ds = self.pf.h.sphere(self.CoM, 4 * self.max_radius)
+        return np.take(ds[key][self._ds_sort], self.particle_mask)
 
-    def virial_density(self):
-        r"""Not implemented """
-        return -1
-
-    def virial_info(self):
-        r"""Not implemented"""
-        return -1 
-
-    def __getitem__(self,key):
-        r"""Not implemented"""
-        return None
-
+    def _get_particle_data(self, halo, fnames, size, field):
+        # Given a list of file names, a halo, its size, and the desired field,
+        # this returns the particle indices for that halo.
+        file = fnames[halo]
+        mylog.info("Getting %d particles from Rockstar binary file %s.", self.supp['num_p'], file)
+        fp = open(file, 'rb')
+        # We need to skip past the header and all the halos.
+        fp.seek(self.halo_list._header_dt.itemsize + \
+            self.halo_list.fname_halos[file] * \
+            self.halo_list._halo_dt.itemsize, os.SEEK_CUR)
+        # Now we skip ahead to where this halo's particles begin.
+        fp.seek(self.supp['p_start'] * 8, os.SEEK_CUR)
+        # And finally, read in the ids.
+        IDs = np.fromfile(fp, dtype=np.int64, count=self.supp['num_p'])
+        fp.close()
+        return IDs
 
     def get_ellipsoid_parameters(self):
         r"""Calculate the parameters that describe the ellipsoid of
 
 
 class LoadedHalo(Halo):
+    _name = "LoadedHalo"
+    # See particle_mask
+    _radjust = 1.05
+
     def __init__(self, pf, id, size=None, CoM=None,
 
         max_dens_point=None, group_total_mass=None, max_radius=None, bulk_vel=None,
         self.fnames = fnames
         self.bin_count = None
         self.overdensity = None
-        self.saved_fields = {}
-        self.particle_mask = None
-        self.ds_sort = None
         self.indices = np.array([])  # Never used for a LoadedHalo.
         # A supplementary data dict.
         if supp is None:
         # 2. From the halo h5 files off disk.
         # 3. Use the unique particle indexes of the halo to select a missing
         # field from an AMR Sphere.
-        try:
+        if key in self._saved_fields:
             # We've already got it.
-            return self.saved_fields[key]
-        except KeyError:
-            # Gotta go get it from the halo h5 files.
-            field_data = self._get_particle_data(self.id, self.fnames,
-                self.size, key)
-            #if key == 'particle_position_x': field_data = None
-            if field_data is not None:
-                self.saved_fields[key] = field_data
-                return self.saved_fields[key]
-            else:
-                # Dynamically create the masking array for particles, and get
-                # the data using standard yt methods. The 1.05 is there to
-                # account for possible silliness having to do with whether
-                # the maximum density or center of mass was used to calculate
-                # the maximum radius.
-                ds = self.pf.h.sphere(self.CoM, 1.05 * self.max_radius)
-                if self.particle_mask is None:
-                    pid = self.__getitem__('particle_index')
-                    sp_pid = ds['particle_index']
-                    self.ds_sort = sp_pid.argsort()
-                    sp_pid = sp_pid[self.ds_sort]
-                    # The result of searchsorted is an array with the positions
-                    # of the indexes in pid as they are in sp_pid. This is
-                    # because each element of pid is in sp_pid only once.
-                    self.particle_mask = np.searchsorted(sp_pid, pid)
-                # We won't store this field below in saved_fields because
-                # that would mean keeping two copies of it, one in the yt
-                # machinery and one here.
-                return ds[key][self.ds_sort][self.particle_mask]
+            return self._saved_fields[key]
+        # Gotta go get it from the halo h5 files.
+        field_data = self._get_particle_data(self.id, self.fnames,
+            self.size, key)
+        if field_data is not None:
+            if key == 'particle_index':
+                field_data = field_data[field_data.argsort()]
+            self._saved_fields[key] = field_data
+            return self._saved_fields[key]
+        # We won't store this field below in saved_fields because
+        # that would mean keeping two copies of it, one in the yt
+        # machinery and one here.
+        ds = self.pf.h.sphere(self.CoM, 1.05 * self.max_radius)
+        return np.take(ds[key][self._ds_sort], self.particle_mask)
 
     def _get_particle_data(self, halo, fnames, size, field):
         # Given a list of file names, a halo, its size, and the desired field,
         f.close()
 
 class RockstarHaloList(HaloList):
-    #because we don't yet no halo-particle affiliations
-    #most of the halo list methods are not implemented
-    #furthermore, Rockstar only accepts DM particles of
-    #a fixed mass, so we don't allow stars at all
-    #Still, we inherit from HaloList because in the future
-    #we might implement halo-particle affiliations
-    def __init__(self,pf,out_list):
+    _name = "Rockstar"
+    _halo_class = RockstarHalo
+    # see io_internal.h in Rockstar.
+    BINARY_HEADER_SIZE=256
+    _header_dt = np.dtype([('magic', np.uint64), ('snap', np.int64),
+        ('chunk', np.int64), ('scale', np.float32), ('Om', np.float32),
+        ('Ol', np.float32), ('h0', np.float32),
+        ('bounds', (np.float32, 6)), ('num_halos', np.int64),
+        ('num_particles', np.int64), ('box_size', np.float32),
+        ('particle_mass', np.float32), ('particle_type', np.int64),
+        ('unused', (np.byte, BINARY_HEADER_SIZE - 4*12 - 8*6))])
+    # see halo.h.
+    _halo_dt = np.dtype([('id', np.int64), ('pos', (np.float32, 6)),
+        ('corevel', (np.float32, 3)), ('bulkvel', (np.float32, 3)),
+        ('m', np.float32), ('r', np.float32), ('child_r', np.float32),
+        ('vmax_r', np.float32), 
+        ('mgrav', np.float32), ('vmax', np.float32),
+        ('rvmax', np.float32), ('rs', np.float32),
+        ('klypin_rs', np.float32), 
+        ('vrms', np.float32), ('J', (np.float32, 3)),
+        ('energy', np.float32), ('spin', np.float32),
+        ('alt_m', (np.float32, 4)), ('Xoff', np.float32),
+        ('Voff', np.float32), ('b_to_a', np.float32),
+        ('c_to_a', np.float32), ('A', (np.float32, 3)),
+        ('bullock_spin', np.float32), ('kin_to_pot', np.float32),
+        ('num_p', np.int64),
+        ('num_child_particles', np.int64), ('p_start', np.int64),
+        ('desc', np.int64), ('flags', np.int64), ('n_core', np.int64),
+        ('min_pos_err', np.float32), ('min_vel_err', np.float32),
+        ('min_bulkvel_err', np.float32), ('padding2', np.float32),])
+    # Above, the padding* fields are due to C struct alignment, which pads
+    # between 4- and 8-byte values so that fields stay properly aligned.
+    _tocleanup = ['padding2']
+
+    def __init__(self, pf, out_list):
+        ParallelAnalysisInterface.__init__(self)
         mylog.info("Initializing Rockstar List")
         self._data_source = None
         self._groups = []
         self._max_dens = -1
         self.pf = pf
         self.out_list = out_list
+        self._data_source = pf.h.all_data()
         mylog.info("Parsing Rockstar halo list")
-        self._parse_output(out_list)
+        self._parse_output()
         mylog.info("Finished %s"%out_list)
 
     def _run_finder(self):
     def _get_dm_indices(self):
         pass
 
-    def _parse_output(self,out_list=None):
+    def _get_halos_binary(self, files):
+        """
+        Parse the binary files to get information about halos in higher
+        precision than the text file.
+        """
+        halos = None
+        self.halo_to_fname = {}
+        self.fname_halos = {}
+        for file in files:
+            fp = open(file, 'rb')
+            # read the header
+            header = np.fromfile(fp, dtype=self._header_dt, count=1)
+            # read the halo information
+            new_halos = np.fromfile(fp, dtype=self._halo_dt,
+                count=header['num_halos'])
+            # Record which binary file holds these halos.
+            for halo in new_halos['id']:
+                self.halo_to_fname[halo] = file
+            # Record how many halos are stored in each binary file.
+            self.fname_halos[file] = header['num_halos']
+            # Add to existing.
+            if halos is not None:
+                halos = np.concatenate((new_halos, halos))
+            else:
+                halos = new_halos.copy()
+            fp.close()
+        # Sort them by mass.
+        halos.sort(order='m')
+        halos = np.flipud(halos)
+        return halos
+
+    def _parse_output(self):
         """
         Read the out_*.list text file produced
         by Rockstar into memory."""
         
         pf = self.pf
-
-        if out_list is None:
-            out_list = self.out_list
-
-        lines = open(out_list).readlines()
-        names = []
-        formats = []
-        
-        #find the variables names from the first defining line
-        names = lines[0].replace('#','').split(' ')
-        for j,line in enumerate(lines):
-            if not line.startswith('#'): break
-
-        #find out the table datatypes but evaluating the first data line
-        splits = filter(lambda x: len(x.strip()) > 0 ,line.split(' '))
-        for num in splits:
-            if 'nan' not in num:
-                formats += np.array(eval(num)).dtype,
-            else:
-                formats += np.dtype('float'),
-        assert len(formats) == len(names)
-
+        # In order to read the binary data, we need to figure out which 
+        # binary files belong to this output.
+        basedir = os.path.dirname(self.out_list)
+        s = self.out_list.split('_')[-1]
+        s = s.rstrip('.list')
+        n = int(s)
+        fglob = path.join(basedir, 'halos_%d.*.bin' % n)
+        files = glob.glob(fglob)
+        halos = self._get_halos_binary(files)
         #Jc = 1.98892e33/pf['mpchcm']*1e5
         Jc = 1.0
-        conv = dict(X=1.0/pf['mpchcm'],
-                    Y=1.0/pf['mpchcm'],
-                    Z=1.0/pf['mpchcm'], #to unitary
-                    VX=1e0,VY=1e0,VZ=1e0, #to km/s
-                    Mvir=1.0, #Msun/h
-                    Vmax=1e0,Vrms=1e0,
-                    Rvir=1.0/pf['kpchcm'],
-                    Rs=1.0/pf['kpchcm'],
-                    JX=Jc,JY=Jc,JZ=Jc)
-        dtype = {'names':names,'formats':formats}
-        halo_table = np.loadtxt(out_list,skiprows=j-1,dtype=dtype,comments='#')            
-        #convert position units  
-        for name in names:
-            halo_table[name]=halo_table[name]*conv.get(name,1)
-        
-        for k,row in enumerate(halo_table):
-            args = tuple([val for val in row])
-            halo = RockstarHalo(self,k,*args)
+        length = 1.0 / pf['mpchcm']
+        conv = dict(pos = np.array([length, length, length,
+                                    1, 1, 1]), # to unitary
+                    r=1.0/pf['kpchcm'], # to unitary
+                    rs=1.0/pf['kpchcm'], # to unitary
+                    )
+        #convert units
+        for name in self._halo_dt.names:
+            halos[name]=halos[name]*conv.get(name,1)
+        # Store the halos in the halo list.
+        for i, row in enumerate(halos):
+            supp = {name:row[name] for name in self._halo_dt.names}
+            # Delete the padding columns. 'supp' below will contain
+            # repeated information, but that's OK.
+            for item in self._tocleanup: del supp[item]
+            halo = RockstarHalo(self, i, size=row['num_p'],
+                CoM=row['pos'][0:3], group_total_mass=row['m'],
+                max_radius=row['r'], bulk_vel=row['bulkvel'],
+                rms_vel=row['vrms'], supp=supp)
             self._groups.append(halo)
-    
 
-    #len is ok
-    #iter is OK
-    #getitem is ok
-    #nn is ok I think
-    #nn2d is ok I think
-
-    def write_out(self):
-        pass
     def write_particle_list(self):
         pass
-    
-
-    
 
 class HOPHaloList(HaloList):
 
             3.28392048e14
         """
         TextHaloList.__init__(self, pf, filename, columns, comment)
+
+LoadTextHalos = LoadTextHaloes
+
+class LoadRockstarHalos(GenericHaloFinder, RockstarHaloList):
+    def __init__(self, pf, filename = None):
+        r"""Load Rockstar halos off disk from Rockstar-output format.
+
+        Parameters
+        ----------
+        filename : String
+            The name of the Rockstar file to read in. Default =
+            "rockstar_halos/out_0.list".
+
+        Examples
+        --------
+        >>> pf = load("data0005")
+        >>> halos = LoadRockstarHalos(pf, "other_name.out")
+        """
+        if filename is None:
+            filename = 'rockstar_halos/out_0.list'
+        RockstarHaloList.__init__(self, pf, filename)
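
A hedged usage sketch of the Rockstar loading path added above; the dataset name is illustrative, and it assumes LoadRockstarHalos is re-exported through yt.mods like the other loaders in halo_finding/api.py:

    from yt.mods import *

    pf = load("data0005")              # illustrative dataset name
    halos = LoadRockstarHalos(pf)      # reads rockstar_halos/out_0.list by default
    halo = halos[0]                    # halos are sorted most massive first
    mvir = halo.virial_mass()          # Msun/h, from the binary 'm' field
    ids = halo["particle_index"]       # read from the halos_*.bin files
    vx = halo["particle_velocity_x"]   # fetched from an AMR sphere via particle_mask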

File yt/analysis_modules/halo_finding/rockstar/rockstar.py

 """
 
 from yt.mods import *
+from yt.utilities.parallel_tools.parallel_analysis_interface import \
+    ParallelAnalysisInterface, ProcessorPool, Communicator
+from yt.analysis_modules.halo_finding.halo_objects import * #Halos & HaloLists
+from yt.config import ytcfg
+
+import rockstar_interface
+
+import socket
+import time
+import threading
+import signal
+import os
 from os import environ
 from os import mkdir
-from yt.utilities.parallel_tools.parallel_analysis_interface import \
-    ParallelAnalysisInterface, ProcessorPool, Communicator
+from os import path
 
-from yt.analysis_modules.halo_finding.halo_objects import * #Halos & HaloLists
-import rockstar_interface
-import socket
-import time
+class InlineRunner(ParallelAnalysisInterface):
+    def __init__(self):
+        # If this is being run inline, num_readers == comm.size, always.
+        psize = ytcfg.getint("yt", "__global_parallel_size")
+        self.num_readers = psize
+        # No choice for you, everyone's a writer too!
+        self.num_writers =  psize
+    
+    def run(self, handler, pool):
+        # If inline, we use forks.
+        server_pid = 0
+        # Start a server on only one machine/fork.
+        if pool.comm.rank == 0:
+            server_pid = os.fork()
+            if server_pid == 0:
+                handler.start_server()
+                os._exit(0)
+        # Start writers on all.
+        writer_pid = 0
+        time.sleep(0.05 + pool.comm.rank/10.0)
+        writer_pid = os.fork()
+        if writer_pid == 0:
+            handler.start_writer()
+            os._exit(0)
+        # Everyone's a reader!
+        time.sleep(0.05 + pool.comm.rank/10.0)
+        handler.start_reader()
+        # Make sure the forks are done, which they should be.
+        if writer_pid != 0:
+            os.waitpid(writer_pid, 0)
+        if server_pid != 0:
+            os.waitpid(server_pid, 0)
 
-class DomainDecomposer(ParallelAnalysisInterface):
-    def __init__(self, pf, comm):
-        ParallelAnalysisInterface.__init__(self, comm=comm)
-        self.pf = pf
-        self.hierarchy = pf.h
-        self.center = (pf.domain_left_edge + pf.domain_right_edge)/2.0
+    def setup_pool(self):
+        pool = ProcessorPool()
+        # Everyone is a reader, and when we're inline, that's all that matters.
+        readers = np.arange(ytcfg.getint("yt", "__global_parallel_size"))
+        pool.add_workgroup(ranks=readers, name="readers")
+        return pool, pool.workgroups[0]
 
-    def decompose(self):
-        dd = self.pf.h.all_data()
-        check, LE, RE, data_source = self.partition_hierarchy_3d(dd)
-        return data_source
+class StandardRunner(ParallelAnalysisInterface):
+    def __init__(self, num_readers, num_writers):
+        self.num_readers = num_readers
+        psize = ytcfg.getint("yt", "__global_parallel_size")
+        if num_writers is None:
+            self.num_writers =  psize - num_readers - 1
+        else:
+            self.num_writers = min(num_writers, psize)
+        if self.num_readers + self.num_writers + 1 != psize:
+            mylog.error('%i readers + %i writers + 1 server != %i MPI processes',
+                    self.num_readers, self.num_writers, psize)
+            raise RuntimeError
+    
+    def run(self, handler, wg):
+        # Not inline so we just launch them directly from our MPI threads.
+        if wg.name == "server":
+            handler.start_server()
+        if wg.name == "readers":
+            time.sleep(0.05)
+            handler.start_reader()
+        if wg.name == "writers":
+            time.sleep(0.1)
+            handler.start_writer()
+    
+    def setup_pool(self):
+        pool = ProcessorPool()
+        pool, workgroup = ProcessorPool.from_sizes(
+           [ (1, "server"),
+             (self.num_readers, "readers"),
+             (self.num_writers, "writers") ]
+        )
+        return pool, workgroup
 
 class RockstarHaloFinder(ParallelAnalysisInterface):
-    def __init__(self, pf, num_readers = 1, num_writers = None, 
-            outbase=None,particle_mass=-1.0,overwrite=False,
-            left_edge = None, right_edge = None):
+    def __init__(self, ts, num_readers = 1, num_writers = None,
+            outbase="rockstar_halos", dm_type=1, 
+            force_res=None, total_particles=None, dm_only=False):
+        r"""Spawns the Rockstar Halo finder, distributes dark matter
+        particles and finds halos.
+
+        The halo finder requires dark matter particles of a fixed mass.
+        Rockstar has three main processes: reader, writer, and the
+        server, which coordinates the reader and writer processes.
+
+        Parameters
+        ----------
+        ts   : TimeSeriesData, StaticOutput
+            This is the data source containing the DM particles. Because 
+            halo IDs may change from one snapshot to the next, the only
+            way to keep a consistent halo ID across time is to feed 
+            Rockstar a set of snapshots, i.e., via TimeSeriesData.
+        num_readers: int
+            The number of readers can be increased from the default
+            of 1 in the event that a single snapshot is split among
+            many files. This can help in cases where performance is
+            IO-limited. Default is 1. If run inline, it is
+            equal to the number of MPI threads.
+        num_writers: int
+            The number of writers determines the number of processing threads
+            as well as the number of threads writing output data.
+            The default is set to comm.size-num_readers-1. If run inline,
+            the default is equal to the number of MPI threads.
+        outbase: str
+            This is where the out*list files that Rockstar makes should be
+            placed. Default is 'rockstar_halos'.
+        dm_type: int
+            In order to exclude stars and other particle types, define
+            the dm_type. Default is 1, as Enzo has the DM particle type=1.
+        force_res: float
+            This parameter specifies the force resolution that Rockstar uses
+            in units of Mpc/h.
+            If no value is provided, this parameter is automatically set to
+            the width of the smallest grid element in the simulation from the
+            last data snapshot (i.e. the one where time has evolved the
+            longest) in the time series:
+            ``pf_last.h.get_smallest_dx() * pf_last['mpch']``.
+        total_particles : int
+            If supplied, this is a pre-calculated total number of dark matter
+            particles present in the simulation. For example, this is useful
+            when analyzing a series of snapshots where the number of dark
+            matter particles should not change and this will save some disk
+            access time. If left unspecified, it will
+            be calculated automatically. Default: ``None``.
+        dm_only : boolean
+            If set to ``True``, it will be assumed that there are only dark
+            matter particles present in the simulation. This can save analysis
+            time if this is indeed the case. Default: ``False``.
+            
+        Returns
+        -------
+        None
+
+        Examples
+        --------
+        To use the script below you must run it using MPI:
+        mpirun -np 3 python test_rockstar.py --parallel
+
+        test_rockstar.py:
+
+        from yt.analysis_modules.halo_finding.rockstar.api import RockstarHaloFinder
+        from yt.mods import *
+        import sys
+
+        ts = TimeSeriesData.from_filenames('/u/cmoody3/data/a*')
+        pm = 7.81769027e+11
+        rh = RockstarHaloFinder(ts)
+        rh.run()
+        """
         ParallelAnalysisInterface.__init__(self)
-        # No subvolume support
-        self.pf = pf
-        self.hierarchy = pf.h
-        if num_writers is None:
-            num_writers = self.comm.size - num_readers -1
-        self.num_readers = num_readers
-        self.num_writers = num_writers
-        self.particle_mass = particle_mass 
-        self.overwrite = overwrite
-        if left_edge is None:
-            left_edge = pf.domain_left_edge
-        if right_edge is None:
-            right_edge = pf.domain_right_edge
-        self.le = left_edge
-        self.re = right_edge
-        if self.num_readers + self.num_writers + 1 != self.comm.size:
-            print '%i reader + %i writers != %i mpi'%\
-                    (self.num_readers, self.num_writers, self.comm.size)
-            raise RuntimeError
-        self.center = (pf.domain_right_edge + pf.domain_left_edge)/2.0
-        data_source = self.pf.h.all_data()
-        self.handler = rockstar_interface.RockstarInterface(
-                self.pf, data_source)
-        if outbase is None:
-            outbase = str(self.pf)+'_rockstar'
-        self.outbase = outbase        
+        # Decide how we're working.
+        if ytcfg.getboolean("yt", "inline") == True:
+            self.runner = InlineRunner()
+        else:
+            self.runner = StandardRunner(num_readers, num_writers)
+        self.num_readers = self.runner.num_readers
+        self.num_writers = self.runner.num_writers
+        mylog.info("Rockstar is using %d readers and %d writers",
+            self.num_readers, self.num_writers)
+        # Note that Rockstar does not support subvolumes.
+        # We assume that all of the snapshots in the time series
+        # use the same domain info as the first snapshots.
+        if not isinstance(ts, TimeSeriesData):
+            ts = TimeSeriesData([ts])
+        self.ts = ts
+        self.dm_type = dm_type
+        self.outbase = outbase
+        if force_res is None:
+            tpf = ts[-1] # Cache a reference
+            self.force_res = tpf.h.get_smallest_dx() * tpf['mpch']
+            # We have to delete now to wipe the hierarchy
+            del tpf
+        else:
+            self.force_res = force_res
+        self.total_particles = total_particles
+        self.dm_only = dm_only
+        # Setup pool and workgroups.
+        self.pool, self.workgroup = self.runner.setup_pool()
+        p = self._setup_parameters(ts)
+        params = self.comm.mpi_bcast(p, root = self.pool['readers'].ranks[0])
+        self.__dict__.update(params)
+        self.handler = rockstar_interface.RockstarInterface(self.ts)
+
+    def _setup_parameters(self, ts):
+        if self.workgroup.name != "readers": return None
+        tpf = ts[0]
+        def _particle_count(field, data):
+            if self.dm_only:
+                return np.prod(data["particle_position_x"].shape)
+            try:
+                return (data["particle_type"]==self.dm_type).sum()
+            except KeyError:
+                return np.prod(data["particle_position_x"].shape)
+        add_field("particle_count", function=_particle_count,
+                  not_in_all=True, particle_type=True)
+        dd = tpf.h.all_data()
+        # Get DM particle mass.
+        all_fields = set(tpf.h.derived_field_list + tpf.h.field_list)
+        for g in tpf.h._get_objs("grids"):
+            if g.NumberOfParticles == 0: continue
+            if self.dm_only:
+                iddm = Ellipsis
+            elif "particle_type" in all_fields:
+                iddm = g["particle_type"] == self.dm_type
+            else:
+                iddm = Ellipsis
+            particle_mass = g['ParticleMassMsun'][iddm][0] / tpf.hubble_constant
+            break
+        p = {}
+        if self.total_particles is None:
+            # Get total_particles in parallel.
+            p['total_particles'] = int(dd.quantities['TotalQuantity']('particle_count')[0])
+        p['left_edge'] = tpf.domain_left_edge
+        p['right_edge'] = tpf.domain_right_edge
+        p['center'] = (tpf.domain_right_edge + tpf.domain_left_edge)/2.0
+        p['particle_mass'] = particle_mass
+        return p
+
+
+    def __del__(self):
+        try:
+            self.pool.free_all()
+        except AttributeError:
+            # This really only acts to cut down on the misleading
+            # error messages when/if this class is called incorrectly
+            # or some other error happens and self.pool hasn't been created
+            # already.
+            pass
 
     def _get_hosts(self):
-        if self.comm.size == 1 or self.workgroup.name == "server":
+        if self.comm.rank == 0 or self.comm.size == 1:
             server_address = socket.gethostname()
             sock = socket.socket()
             sock.bind(('', 0))
         """
         
         """
-        if self.comm.size > 1:
-            self.pool = ProcessorPool()
-            mylog.debug("Num Writers = %s Num Readers = %s",
-                        self.num_writers, self.num_readers)
-            self.pool.add_workgroup(1, name = "server")
-            self.pool.add_workgroup(self.num_readers, name = "readers")
-            self.pool.add_workgroup(self.num_writers, name = "writers")
-            for wg in self.pool.workgroups:
-                if self.comm.rank in wg.ranks: self.workgroup = wg
         if block_ratio != 1:
             raise NotImplementedError
         self._get_hosts()
-        #because rockstar *always* write to exactly the same
-        #out_0.list filename we make a directory for it
-        #to sit inside so it doesn't get accidentally
-        #overwritten 
-        if self.workgroup.name == "server":
-            if not os.path.exists(self.outbase):
-                os.mkdir(self.outbase)
         self.handler.setup_rockstar(self.server_address, self.port,
+                    len(self.ts), self.total_particles, 
+                    self.dm_type,
                     parallel = self.comm.size > 1,
                     num_readers = self.num_readers,
                     num_writers = self.num_writers,
                     writing_port = -1,
                     block_ratio = block_ratio,
                     outbase = self.outbase,
+                    force_res = self.force_res,
                     particle_mass = float(self.particle_mass),
+                    dm_only = int(self.dm_only),
                     **kwargs)
+        # Make the directory to store the halo lists in.
+        if self.comm.rank == 0:
+            if not os.path.exists(self.outbase):
+                os.makedirs(self.outbase)
+            # Make a record of which dataset corresponds to which set of
+            # output files because it will be easy to lose this connection.
+            fp = open(self.outbase + '/pfs.txt', 'w')
+            fp.write("# pfname\tindex\n")
+            for i, pf in enumerate(self.ts):
+                pfloc = path.join(path.relpath(pf.fullpath), pf.basename)
+                line = "%s\t%d\n" % (pfloc, i)
+                fp.write(line)
+            fp.close()
+        # This barrier makes sure the directory exists before it might be used.
+        self.comm.barrier()
         if self.comm.size == 1:
             self.handler.call_rockstar()
         else:
-            self.comm.barrier()
-            if self.workgroup.name == "server":
-                self.handler.start_server()
-            elif self.workgroup.name == "readers":
-                time.sleep(0.1 + self.workgroup.comm.rank/10.0)
-                self.handler.start_client()
-            elif self.workgroup.name == "writers":
-                time.sleep(0.2 + self.workgroup.comm.rank/10.0)
-                self.handler.start_client()
-            self.pool.free_all()
+            # And run it!
+            self.runner.run(self.handler, self.workgroup)
         self.comm.barrier()
-        #quickly rename the out_0.list 
+        self.pool.free_all()
     
     def halo_list(self,file_name='out_0.list'):
         """

File yt/analysis_modules/halo_finding/rockstar/rockstar_interface.pyx

 cimport cython
 from libc.stdlib cimport malloc
 
+from yt.config import ytcfg
+
 cdef import from "particle.h":
     struct particle:
         np.int64_t id
 cdef import from "config.h":
     void setup_config()
 
-cdef import from "server.h":
+cdef import from "server.h" nogil:
     int server()
+    np.int64_t READER_TYPE
+    np.int64_t WRITER_TYPE
 
-cdef import from "client.h":
-    void client()
+cdef import from "client.h" nogil:
+    void client(np.int64_t in_type)
 
 cdef import from "meta_io.h":
     void read_particles(char *filename)
-    void output_and_free_halos(np.int64_t id_offset, np.int64_t snap, 
+    void output_halos(np.int64_t id_offset, np.int64_t snap, 
 			   np.int64_t chunk, float *bounds)
 
 cdef import from "config_vars.h":
     np.float64_t AVG_PARTICLE_SPACING
     np.int64_t SINGLE_SNAP
 
-def print_rockstar_settings():
-    # We have to do the config
-    print "FILE_FORMAT =", FILE_FORMAT
-    print "PARTICLE_MASS =", PARTICLE_MASS
-
-    print "MASS_DEFINITION =", MASS_DEFINITION
-    print "MIN_HALO_OUTPUT_SIZE =", MIN_HALO_OUTPUT_SIZE
-    print "FORCE_RES =", FORCE_RES
-
-    print "SCALE_NOW =", SCALE_NOW
-    print "h0 =", h0
-    print "Ol =", Ol
-    print "Om =", Om
-
-    print "GADGET_ID_BYTES =", GADGET_ID_BYTES
-    print "GADGET_MASS_CONVERSION =", GADGET_MASS_CONVERSION
-    print "GADGET_LENGTH_CONVERSION =", GADGET_LENGTH_CONVERSION
-    print "GADGET_SKIP_NON_HALO_PARTICLES =", GADGET_SKIP_NON_HALO_PARTICLES
-    print "RESCALE_PARTICLE_MASS =", RESCALE_PARTICLE_MASS
-
-    print "PARALLEL_IO =", PARALLEL_IO
-    print "PARALLEL_IO_SERVER_ADDRESS =", PARALLEL_IO_SERVER_ADDRESS
-    print "PARALLEL_IO_SERVER_PORT =", PARALLEL_IO_SERVER_PORT
-    print "PARALLEL_IO_WRITER_PORT =", PARALLEL_IO_WRITER_PORT
-    print "PARALLEL_IO_SERVER_INTERFACE =", PARALLEL_IO_SERVER_INTERFACE
-    print "RUN_ON_SUCCESS =", RUN_ON_SUCCESS
-
-    print "INBASE =", INBASE
-    print "FILENAME =", FILENAME
-    print "STARTING_SNAP =", STARTING_SNAP
-    print "NUM_SNAPS =", NUM_SNAPS
-    print "NUM_BLOCKS =", NUM_BLOCKS
-    print "NUM_READERS =", NUM_READERS
-    print "PRELOAD_PARTICLES =", PRELOAD_PARTICLES
-    print "SNAPSHOT_NAMES =", SNAPSHOT_NAMES
-    print "LIGHTCONE_ALT_SNAPS =", LIGHTCONE_ALT_SNAPS
-    print "BLOCK_NAMES =", BLOCK_NAMES
-
-    print "OUTBASE =", OUTBASE
-    print "OVERLAP_LENGTH =", OVERLAP_LENGTH
-    print "NUM_WRITERS =", NUM_WRITERS
-    print "FORK_READERS_FROM_WRITERS =", FORK_READERS_FROM_WRITERS
-    print "FORK_PROCESSORS_PER_MACHINE =", FORK_PROCESSORS_PER_MACHINE
-
-    print "OUTPUT_FORMAT =", OUTPUT_FORMAT
-    print "DELETE_BINARY_OUTPUT_AFTER_FINISHED =", DELETE_BINARY_OUTPUT_AFTER_FINISHED
-    print "FULL_PARTICLE_CHUNKS =", FULL_PARTICLE_CHUNKS
-    print "BGC2_SNAPNAMES =", BGC2_SNAPNAMES
-
-    print "BOUND_PROPS =", BOUND_PROPS
-    print "BOUND_OUT_TO_HALO_EDGE =", BOUND_OUT_TO_HALO_EDGE
-    print "DO_MERGER_TREE_ONLY =", DO_MERGER_TREE_ONLY
-    print "IGNORE_PARTICLE_IDS =", IGNORE_PARTICLE_IDS
-    print "TRIM_OVERLAP =", TRIM_OVERLAP
-    print "ROUND_AFTER_TRIM =", ROUND_AFTER_TRIM
-    print "LIGHTCONE =", LIGHTCONE
-    print "PERIODIC =", PERIODIC
-
-    print "LIGHTCONE_ORIGIN =", LIGHTCONE_ORIGIN[0]
-    print "LIGHTCONE_ORIGIN[1] =", LIGHTCONE_ORIGIN[1]
-    print "LIGHTCONE_ORIGIN[2] =", LIGHTCONE_ORIGIN[2]
-    print "LIGHTCONE_ALT_ORIGIN =", LIGHTCONE_ALT_ORIGIN[0]
-    print "LIGHTCONE_ALT_ORIGIN[1] =", LIGHTCONE_ALT_ORIGIN[1]
-    print "LIGHTCONE_ALT_ORIGIN[2] =", LIGHTCONE_ALT_ORIGIN[2]
-
-    print "LIMIT_CENTER =", LIMIT_CENTER[0]
-    print "LIMIT_CENTER[1] =", LIMIT_CENTER[1]
-    print "LIMIT_CENTER[2] =", LIMIT_CENTER[2]
-    print "LIMIT_RADIUS =", LIMIT_RADIUS
-
-    print "SWAP_ENDIANNESS =", SWAP_ENDIANNESS
-    print "GADGET_VARIANT =", GADGET_VARIANT
-
-    print "FOF_FRACTION =", FOF_FRACTION
-    print "FOF_LINKING_LENGTH =", FOF_LINKING_LENGTH
-    print "INCLUDE_HOST_POTENTIAL_RATIO =", INCLUDE_HOST_POTENTIAL_RATIO
-    print "DOUBLE_COUNT_SUBHALO_MASS_RATIO =", DOUBLE_COUNT_SUBHALO_MASS_RATIO
-    print "TEMPORAL_HALO_FINDING =", TEMPORAL_HALO_FINDING
-    print "MIN_HALO_PARTICLES =", MIN_HALO_PARTICLES
-    print "UNBOUND_THRESHOLD =", UNBOUND_THRESHOLD
-    print "ALT_NFW_METRIC =", ALT_NFW_METRIC
-
-    print "TOTAL_PARTICLES =", TOTAL_PARTICLES
-    print "BOX_SIZE =", BOX_SIZE
-    print "OUTPUT_HMAD =", OUTPUT_HMAD
-    print "OUTPUT_PARTICLES =", OUTPUT_PARTICLES
-    print "OUTPUT_LEVELS =", OUTPUT_LEVELS
-    print "DUMP_PARTICLES =", DUMP_PARTICLES[0]
-    print "DUMP_PARTICLES[1] =", DUMP_PARTICLES[1]
-    print "DUMP_PARTICLES[2] =", DUMP_PARTICLES[2]
-
-    print "AVG_PARTICLE_SPACING =", AVG_PARTICLE_SPACING
-    print "SINGLE_SNAP =", SINGLE_SNAP
-
+# Forward declare
 cdef class RockstarInterface
 
-cdef void rh_read_particles(char *filename, particle **p, np.int64_t *num_p):
-    print 'reading from particle filename %s'%filename # should print ./inline.0
+cdef void rh_read_particles(char *filename, particle **p, np.int64_t *num_p) with gil:
+    global SCALE_NOW
     cdef np.float64_t conv[6], left_edge[6]
     cdef np.ndarray[np.int64_t, ndim=1] arri
     cdef np.ndarray[np.float64_t, ndim=1] arr
+    cdef unsigned long long pi,fi,i
+    pf = rh.tsl.next()
+    print 'reading from particle filename %s: %s'%(filename,pf.basename)
     block = int(str(filename).rsplit(".")[-1])
+    n = rh.block_ratio
 
-    # Now we want to grab data from only a subset of the grids.
-    n = rh.block_ratio
-    dd = rh.pf.h.all_data()
-    grids = np.array_split(dd._grids, NUM_BLOCKS)[block]
-    tnpart = 0
-    for g in grids:
-        tnpart += dd._get_data_from_grid(g, "particle_index").size
-    p[0] = <particle *> malloc(sizeof(particle) * tnpart)
-    #print "Loading indices: size = ", tnpart
-    conv[0] = conv[1] = conv[2] = rh.pf["mpchcm"]
+    SCALE_NOW = 1.0/(pf.current_redshift+1.0)
+    # Now we want to grab data from only a subset of the grids for each reader.
+    all_fields = set(pf.h.derived_field_list + pf.h.field_list)
+
+    # First we need to find out how many this reader is going to read in
+    # if the number of readers > 1.
+    if NUM_BLOCKS > 1:
+        local_parts = 0
+        for g in pf.h._get_objs("grids"):
+            if g.NumberOfParticles == 0: continue
+            if rh.dm_only:
+                iddm = Ellipsis
+            elif "particle_type" in all_fields:
+                iddm = g["particle_type"] == rh.dm_type
+            else:
+                iddm = Ellipsis
+            arri = g["particle_index"].astype("int64")
+            arri = arri[iddm] #pick only DM
+            local_parts += arri.size
+    else:
+        local_parts = TOTAL_PARTICLES
+
+    #print "local_parts", local_parts
+
+    p[0] = <particle *> malloc(sizeof(particle) * local_parts)
+
+    conv[0] = conv[1] = conv[2] = pf["mpchcm"]
     conv[3] = conv[4] = conv[5] = 1e-5
-    left_edge[0] = rh.pf.domain_left_edge[0]
-    left_edge[1] = rh.pf.domain_left_edge[1]
-    left_edge[2] = rh.pf.domain_left_edge[2]
+    left_edge[0] = pf.domain_left_edge[0]
+    left_edge[1] = pf.domain_left_edge[1]
+    left_edge[2] = pf.domain_left_edge[2]
     left_edge[3] = left_edge[4] = left_edge[5] = 0.0
     pi = 0
-    for g in grids:
-        arri = dd._get_data_from_grid(g, "particle_index").astype("int64")
+    for g in pf.h._get_objs("grids"):
+        if g.NumberOfParticles == 0: continue
+        if rh.dm_only:
+            iddm = Ellipsis
+        elif "particle_type" in all_fields:
+            iddm = g["particle_type"] == rh.dm_type
+        else:
+            iddm = Ellipsis
+        arri = g["particle_index"].astype("int64")
+        arri = arri[iddm] #pick only DM
         npart = arri.size
         for i in range(npart):
             p[0][i+pi].id = arri[i]
                       "particle_position_z",
                       "particle_velocity_x", "particle_velocity_y",
                       "particle_velocity_z"]:
-            arr = dd._get_data_from_grid(g, field).astype("float64")
+            arr = g[field].astype("float64")
+            arr = arr[iddm] #pick DM
             for i in range(npart):
                 p[0][i+pi].pos[fi] = (arr[i]-left_edge[fi])*conv[fi]
             fi += 1
         pi += npart
-    num_p[0] = tnpart
-    print "Block #%i | Particles %i | Grids %i"%\
-            ( block, pi, len(grids))
+    num_p[0] = local_parts
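
The iddm handling above relies on a NumPy indexing convention: indexing with Ellipsis returns every particle (the dm_only and missing-particle_type cases), while indexing with a boolean mask keeps only the particles whose type matches rh.dm_type. A minimal sketch of that filtering, using made-up arrays rather than real grid data, is:

    import numpy as np

    particle_index = np.arange(6, dtype="int64")  # stand-in for g["particle_index"]
    particle_type = np.array([1, 1, 2, 1, 2, 1])  # stand-in for g["particle_type"]
    dm_type = 1                                   # assumed dark-matter type id

    iddm = particle_type == dm_type   # boolean mask: keep only DM particles
    dm_index = particle_index[iddm]   # -> array([0, 1, 3, 5])

    iddm = Ellipsis                   # dm_only / untyped case: keep everything
    all_index = particle_index[iddm]  # -> the full, unfiltered array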
 
 cdef class RockstarInterface:
 
-    cdef public object pf
     cdef public object data_source
+    cdef public object ts
+    cdef public object tsl
     cdef int rank
     cdef int size
     cdef public int block_ratio
+    cdef public int dm_type
+    cdef public int total_particles
+    cdef public int dm_only
 
-    def __cinit__(self, pf, data_source):
-        self.pf = pf
-        self.data_source = data_source
+    def __cinit__(self, ts):
+        self.ts = ts
+        self.tsl = ts.__iter__() # timeseries generator used by rh_read_particles
 
     def setup_rockstar(self, char *server_address, char *server_port,
-                       np.float64_t particle_mass = -1.0,
+                       int num_snaps, np.int64_t total_particles,
+                       int dm_type,
+                       np.float64_t particle_mass,
                        int parallel = False, int num_readers = 1,
                        int num_writers = 1,
                        int writing_port = -1, int block_ratio = 1,
-                       int periodic = 1, int num_snaps = 1,
-                       int min_halo_size = 25, outbase = "None"):
+                       int periodic = 1, force_res=None,
+                       int min_halo_size = 25, outbase = "None",
+                       int dm_only = 0):
         global PARALLEL_IO, PARALLEL_IO_SERVER_ADDRESS, PARALLEL_IO_SERVER_PORT
         global FILENAME, FILE_FORMAT, NUM_SNAPS, STARTING_SNAP, h0, Ol, Om
         global BOX_SIZE, PERIODIC, PARTICLE_MASS, NUM_BLOCKS, NUM_READERS
         global FORK_READERS_FROM_WRITERS, PARALLEL_IO_WRITER_PORT, NUM_WRITERS
         global rh, SCALE_NOW, OUTBASE, MIN_HALO_OUTPUT_SIZE
+        global OVERLAP_LENGTH, TOTAL_PARTICLES, FORCE_RES
+        if force_res is not None:
+            FORCE_RES=np.float64(force_res)
+            #print "set force res to ",FORCE_RES
+        OVERLAP_LENGTH = 0.0
         if parallel:
             PARALLEL_IO = 1
             PARALLEL_IO_SERVER_ADDRESS = server_address
         OUTPUT_FORMAT = "ASCII"
         NUM_SNAPS = num_snaps
         NUM_READERS = num_readers
-        NUM_SNAPS = 1
         NUM_WRITERS = num_writers
         NUM_BLOCKS = num_readers
         MIN_HALO_OUTPUT_SIZE=min_halo_size
+        TOTAL_PARTICLES = total_particles
         self.block_ratio = block_ratio
-
-        h0 = self.pf.hubble_constant
-        Ol = self.pf.omega_lambda
-        Om = self.pf.omega_matter
-        SCALE_NOW = 1.0/(self.pf.current_redshift+1.0)
+        self.dm_only = dm_only
+
+        tpf = self.ts[0]
+        h0 = tpf.hubble_constant
+        Ol = tpf.omega_lambda
+        Om = tpf.omega_matter
+        SCALE_NOW = 1.0/(tpf.current_redshift+1.0)
         if not outbase =='None'.decode('UTF-8'):
             #output directory. since we can't change the output filenames
             #workaround is to make a new directory
-            print 'using %s as outbase'%outbase
             OUTBASE = outbase 
 
-        if particle_mass < 0:
-            print "Assuming single-mass particle."
-            particle_mass = self.pf.h.grids[0]["ParticleMassMsun"][0] / h0
         PARTICLE_MASS = particle_mass
         PERIODIC = periodic
-        BOX_SIZE = (self.pf.domain_right_edge[0] -
-                    self.pf.domain_left_edge[0]) * self.pf['mpchcm']
+        BOX_SIZE = (tpf.domain_right_edge[0] -
+                    tpf.domain_left_edge[0]) * tpf['mpchcm']
         setup_config()
         rh = self
+        rh.dm_type = dm_type
         cdef LPG func = rh_read_particles
         set_load_particles_generic(func)
 
     def call_rockstar(self):
         read_particles("generic")
         rockstar(NULL, 0)
-        output_and_free_halos(0, 0, 0, NULL)
+        output_halos(0, 0, 0, NULL)
 
     def start_server(self):
-        server()
+        with nogil:
+            server()
 
-    def start_client(self):
-        client()
+    def start_reader(self):
+        cdef np.int64_t in_type = np.int64(READER_TYPE)
+        client(in_type)
+
+    def start_writer(self):
+        cdef np.int64_t in_type = np.int64(WRITER_TYPE)
+        client(in_type)
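
Taken together, the new entry points split a Rockstar run into a server task plus reader and writer tasks. The sketch below shows one way the reworked interface might be driven from Python; the method names and arguments come from the hunk above, but the time-series construction, server address and port, particle count, DM type, particle mass, and output directory are illustrative assumptions, not values from this changeset.

    # Illustrative driver; every literal value here is an assumption.
    from yt.mods import TimeSeriesData            # assumed import location

    ts = TimeSeriesData.from_filenames("DD????/DD????")  # hypothetical outputs
    ri = RockstarInterface(ts)
    ri.setup_rockstar("localhost", "54321",
                      num_snaps=4, total_particles=128**3,
                      dm_type=1, particle_mass=1.0e9,
                      parallel=True, num_readers=1, num_writers=1,
                      outbase="rockstar_halos")
    # One MPI task then runs ri.start_server(); the remaining tasks call
    # ri.start_reader() or ri.start_writer() as appropriate.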

File yt/analysis_modules/halo_finding/rockstar/setup.py

     config = Configuration('rockstar',parent_package,top_path)
     config.make_config_py() # installs __config__.py
     #config.make_svn_version_py()
-    rd = os.environ["ROCKSTAR_DIR"]
+    try:
+        rd = open("rockstar.cfg").read().strip()
+    except IOError:
+        print "Reading Rockstar location from rockstar.cfg failed."
+        print "Please place the base directory of your"
+        print "Rockstar install in rockstar.cfg and restart."
+        print "(ex: \"echo '/path/to/Rockstar-0.99' > rockstar.cfg\" )"
+        sys.exit(1)
     config.add_extension("rockstar_interface",
                          "yt/analysis_modules/halo_finding/rockstar/rockstar_interface.pyx",
                          library_dirs=[rd],
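
With this change the build locates Rockstar through a rockstar.cfg file instead of the ROCKSTAR_DIR environment variable (see the matching change to yt/analysis_modules/halo_finding/setup.py below). The file is expected to contain nothing but the base directory of the Rockstar checkout; for example, using the placeholder path quoted in the error message above:

    /path/to/Rockstar-0.99

which is exactly what the suggested echo one-liner writes.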

File yt/analysis_modules/halo_finding/setup.py

     config.add_subpackage("fof")
     config.add_subpackage("hop")
     config.add_subpackage("parallel_hop")
-    if "ROCKSTAR_DIR" in os.environ:
+    if os.path.exists("rockstar.cfg"):
         config.add_subpackage("rockstar")
     config.make_config_py() # installs __config__.py
     #config.make_svn_version_py()

File yt/analysis_modules/halo_profiler/multi_halo_profiler.py

         # Create output directories.
         self.output_dir = output_dir
         if output_dir is None:
-            output_dir = '.'
+            self.output_dir = self.pf.fullpath
         else:
             self.__check_directory(output_dir)
-        self.output_dir = os.path.join(output_dir, os.path.basename(self.pf.fullpath))
+            self.output_dir = os.path.join(output_dir, self.pf.directory)
         self.__check_directory(self.output_dir)
         self.profile_output_dir = os.path.join(self.output_dir, profile_output_dir)
         self.projection_output_dir = os.path.join(self.output_dir, projection_output_dir)
                 elif self.velocity_center[1] == 'sphere':
                     mylog.info('Calculating sphere bulk velocity.')
                     sphere.set_field_parameter('bulk_velocity',
-                                               sphere.quantities['BulkVelocity']()
+                                               sphere.quantities['BulkVelocity']())
                 else:
                     mylog.error("Invalid parameter: velocity_center.")
                     return None

File yt/analysis_modules/spectral_integrator/api.py

 
 from .spectral_frequency_integrator import \
     SpectralFrequencyIntegrator, \
-    create_table_from_textfiles
+    create_table_from_textfiles, \
+    EmissivityIntegrator, \
+    add_xray_emissivity_field, \
+    add_xray_luminosity_field, \
+    add_xray_photon_emissivity_field

File yt/analysis_modules/spectral_integrator/spectral_frequency_integrator.py

 
 Author: Matthew Turk <matthewturk@gmail.com>
 Affiliation: KIPAC/SLAC/Stanford
+Author: Britton Smith <brittons@origins.colorado.edu>
+Affiliation: Michigan State University
 Homepage: http://yt-project.org/
 License:
-  Copyright (C) 2007-2011 Matthew Turk.  All Rights Reserved.
+  Copyright (C) 2007-2012 Matthew Turk.  All Rights Reserved.