Commits

Britton Smith committed 58bafbd

Moving more simple recipes to docs.

Comments (0)

Files changed (8)

recipes/multi_width_save.py

-"""
-This recipe shows a slightly-fancy way to save a couple plots at a lot of
-different widths, ensuring that across the plots we have the same min/max for
-the colorbar.
-"""
-from yt.mods import *
-
-fn = "RedshiftOutput0005" # parameter file to load
-pf = load(fn) # load data
-
-pc = PlotCollection(pf, "c") # We get our Plot Collection object
-
-# Note that when we save, we will be using string formatting to change all of
-# the bits in here.  You can add more, or remove some, if you like.
-fn = "%(bn)s_%(width)010i_%(unit)s" # template for image file names
-
-# Now let's set up the widths we want to use.
-widths = [ (2, "mpc"), (1000, 'kpc')]
-# We could add on more of these with:
-#  widths += [ ... ]
-
-# Now we add a slice for x and y.
-pc.add_slice("Density", "x")
-pc.add_slice("Density", "y")
-
-# So for all of our widths, we will set the width of the plot and then make
-# sure that our limits for the colorbar are the min/max across the two plots.
-# Then we save!  Each saved file will have a descriptive name, so we can tell
-# them apart.
-
-for width, unit in widths:
-    pc.set_width(width,unit)
-    vmin = min([p.norm.vmin for p in pc.plots])
-    vmax = max([p.norm.vmax for p in pc.plots])
-    pc.set_zlim(vmin,vmax)
-    # This is the string formatting we talked about earlier
-    d = {'bn':pf.basename, 'width':width, 'unit':unit}
-    pc.save(fn % d)
-

recipes/offaxis_projection.py

-"""
-This recipe shows how to make a projection of a dataset from an arbitrary
-projection angle (so you are not confined to the x, y, and z axes).  
-See :ref:`volume_rendering` for more information.
-
-"""
-from yt.mods import * # set up our namespace
-
-fn = "RedshiftOutput0005" # parameter file to load
-
-pf = load(fn) # load data
-
-# Now we need a center of our volume to render.  Here we'll just use
-# 0.5,0.5,0.5, because volume renderings are not periodic.
-c = [0.5, 0.5, 0.5]
-
-# Our image plane will be normal to some vector.  For things like collapsing
-# objects, you could set it the way you would a cutting plane -- but for this
-# dataset, we'll just choose an off-axis value at random.  This gets normalized
-# automatically.
-L = [0.5, 0.2, 0.7]
-
-# Our "width" is the width of the image plane as well as the depth -- so we set
-# it to be 0.8 so we get almost the whole domain.  Note that corners may be
-# visible in the output image!
-W = 0.8
-
-# Now we decide how big an image we want.  512x512 should be sufficient.
-N = 512
-
-# Now we call the off_axis_projection function, which handles the rest.
-# Note that we set no_ghost equal to False, so that we *do* include ghost
-# zones in our data.  This takes longer to calculate, but the results look
-# much cleaner than when you ignore the ghost zones.
-# Also note that we set the field which we want to project as "Density", but
-# really we could use any arbitrary field like "Temperature", "Metallicity"
-# or whatever.
-image = off_axis_projection(pf, c, L, W, N, "Density", no_ghost=False)
-
-# Image is now an NxN array representing the intensities of the various pixels.
-# And now, we call our direct image saver.  We save the log of the result.
-write_image(na.log10(image), "%s_offaxis_projection.png" % pf)

recipes/overplot_particles.py

-"""
-This is a simple recipe to show how to open a dataset, plot a projection
-through it, and add particles on top.  For more information see
-:ref:`callbacks`.
-"""
-from yt.mods import * # set up our namespace
-
-fn = "RedshiftOutput0005" # parameter file to load
-
-pf = load(fn) # load data
-pc = PlotCollection(pf, "c") # center at middle of domain
-p = pc.add_projection("Density", "x")
-p.modify["particles"](1.0) # 1.0 is the 'width' we want for our slab of
-                            # particles -- this governs the allowable locations
-                            # of particles that show up on the image
-                            # NOTE: we can also supply a *ptype* to cut based
-                            # on a given (integer) particle type
-pc.set_width(1.0, 'unitary') # change width of our plot to the full domain
-pc.save(fn) # save all plots

recipes/sum_mass_in_sphere.py

-"""
-This recipe shows how to take a sphere, centered on the most dense point, and
-sum up the total mass in baryons and particles within that sphere.  Note that
-this recipe will take advantage of multiple CPUs if executed with mpirun and
-supplied the --parallel command line argument.  For more information, see
-:ref:`derived-quantities`.
-"""
-from yt.mods import * # set up our namespace
-
-fn = "RedshiftOutput0005" # parameter file to load
-
-pf = load(fn) # load data
-sp = pf.h.sphere("max", (1.0, "mpc"))
-
-baryon_mass, particle_mass = sp.quantities["TotalQuantity"](
-        ["CellMassMsun", "ParticleMassMsun"])
-
-print "Total mass in sphere is %0.5e (gas = %0.5e / particles = %0.5e)" % \
-            (baryon_mass + particle_mass, baryon_mass, particle_mass)

recipes/thin_slice_projection.py

-"""
-This is a simple recipe to show how to open a dataset and then take a
-weighted-average projection through it, but only through a very thin slice of
-the region.  For more information see :ref:`methods-projections`.
-"""
-from yt.mods import * # set up our namespace
-
-fn = "RedshiftOutput0005" # parameter file to load
-
-pf = load(fn) # load data
-pc = PlotCollection(pf) # defaults to center at most dense point
-
-# The region object requires a center, a left edge, and a right edge.  We want
-# a thin slice and we're projecting along x, so we'll create a region that
-# fully covers the domain in y and z, but only a portion of it in x.
-region = pf.h.region([0.3, 0.5, 0.5], [0.1, 0.0, 0.0], [0.5, 1.0, 1.0])
-
-pc.add_projection("Density", "x", weight_field="Density", data_source = region) # 0 = x-axis
-pc.save(fn) # save all plots

recipes/time_series_phase.py

-"""
-This is a recipe to sit inside a directory and plot a phase diagram for every
-one of the outputs in that directory.
-
-If run with mpirun and the --parallel flag, this will take advantage of
-multiple processors.
-"""
-from yt.mods import * # set up our namespace
-
-# this means get all the parameter files that it can autodetect and then supply
-# them as parameter file objects to the loop.
-for pf in all_pfs(max_depth=2):
-    # We create a plot collection to hold our plot
-    # If we don't specify the center, it will look for one -- but we don't
-    # really care where it's centered for this plot.
-    pc = PlotCollection(pf, "c")
-
-    # Now we add a phase plot of a sphere with radius 1.0 in code units.
-    # If your domain is not 0..1, then this may not cover it completely.
-    p = pc.add_phase_sphere(1.0, '1', ["Density", "Temperature", "CellMassMsun"],
-                        weight=None,
-                        x_bins=128, x_bounds = (1e-32, 1e-24),
-                        y_bins=128, y_bounds = (1e2, 1e7))
-    # We've over-specified things -- but this will help ensure we have constant
-    # bounds.  lazy_reader gives it the go-ahead to run in parallel, and we
-    # have asked for 128 bins from 1e-32 .. 1e-24 in Density-space and 128 bins
-    # between 1e2 and 1e7 in Temperature space.  This will lead to very fine
-    # points of much lower mass, which is okay.  You can reduce the number of
-    # bins to get more mass in each bin.  Additionally, weight=None means that
-    # no averaging is done -- it just gets summed up, so the value of each bin
-    # will be all the mass residing within that bin.
-
-    # Now let's add a title with some fun information.  p is the plot we were
-    # handed previously.  We will add the name of the parameter file and the
-    # current redshift.
-    p.modify["title"]("%s (z = %0.2f)" % (pf, pf["CosmologyCurrentRedshift"]))
-
-    # Now let's save it out.
-    pc.save()#"%s" % pf)

recipes/time_series_quantity.py

-"""
-This is a recipe to sit inside a directory and calculate a quantity for all of
-the outputs in that directory.
-
-If run with mpirun and the --parallel flag, this will take advantage of
-multiple processors.
-"""
-from yt.mods import * # set up our namespace
-
-# First set up our times and quantities lists
-times = []
-values = []
-
-# this means get all the parameter files that it can autodetect and then supply
-# them as parameter file objects to the loop.
-for pf in all_pfs(max_depth=2):
-    # Get the current time, convert to years from code units
-    times.append(pf["InitialTime"] * pf["years"])
-
-    # Now get a box containing the entire dataset
-    data = pf.h.all_data()
-    # Now we calculate the average.  The first argument is the quantity to
-    # average, the second is the weight.
-    # "lazy_reader" has two meanings -- the first is that it will try to
-    # operate on each individual grid, rather than a flattened array of all the
-    # data.  The second is that it will also distribute grids across multiple
-    # processors, if multiple processors are in use.
-    val = data.quantities["WeightedAverageQuantity"](
-            "Temperature", "CellVolume", lazy_reader=True)
-    values.append(val)
-
-# Now we have our values and our time.  We can plot this in pylab!
-
-import pylab
-pylab.semilogy(times, values, '-x')
-pylab.xlabel(r"$Time [years]$")
-pylab.ylabel(r"$\mathrm{H}^{+}\/\/\mathrm{Fraction}$")
-pylab.savefig("average_HII_fraction.png")

recipes/velocity_vectors_on_slice.py

-"""
-This is a simple recipe to show how to open a dataset, plot a slice
-through it, and add velocity vectors on top.
-"""
-from yt.mods import * # set up our namespace
-
-fn = "RedshiftOutput0005" # parameter file to load
-
-pf = load(fn) # load data
-pc = PlotCollection(pf) # defaults to center at most dense point
-p = pc.add_slice("Density", 0) # 0 = x-axis
-p.modify["velocity"]() # This takes a few arguments, but we'll use the defaults
-                       # here.  You can control the 'skip' factor in the
-                       # vectors.
-pc.set_width(2.5, 'mpc') # change width of all plots in pc
-pc.save(fn) # save all plots