Commits

Matthew Turk  committed a6919fa

Initial import of a bunch of geometry testing routines

  • Participants

Comments (0)

Files changed (34)

+# Time a cold read of the "Density" field from an all_data container for a
+# single Enzo dataset, then print the field's float64 sum and shape as a
+# quick correctness fingerprint.  (Filename of this hunk is not shown in
+# the rendered page.)
+from yt.mods import *
+import time
+
+pf = load("JHK-DD0030/galaxy0030")
+dd = pf.h.all_data()
+t1 = time.time()
+data = dd["Density"]
+t2 = time.time()
+print data.sum(dtype='float64'), data.shape
+print "%0.3e" % (t2-t1)

File all_functionality.py

+# Smoke-test a battery of yt data objects (sphere, ortho_ray, slice, disk,
+# cutting plane, all_data): read a mixed list of fluid/particle fields from
+# each, then compare the coordinate arrays produced by the three chunking
+# strategies ("io", "all", "spatial") over a small sphere.
+from yt.mods import *
+pf = load("JHK-DD0030/galaxy0030")
+
+#v, c = pf.h.find_max("Density")
+c = [0.5, 0.5, 0.5]
+
+# Deliberate mix of cell fields, particle fields and derived fields.
+fields = ["dm_density",
+          "particle_type", "CellMassMsun", "x", "Density", "Ones", "Entropy",
+          "particle_position_x", "particle_velocity_x", "ParticleMassMsun",
+         ]
+
+def do(obj):
+    # Read every field in `fields` from obj and print its size; then assert
+    # that the bare "Density" lookup matches the ("gas", "Density") tuple
+    # lookup on the same object.
+    print "GOT", obj._type_name
+    for field in fields:
+        ff = obj[field]
+        print "% 25s % 15i" % (field, ff.size)
+    assert na.all(obj["Density"] == obj["gas","Density"])
+    print
+
+do(pf.h.sphere([0.5,0.5,0.5], (10.0, 'kpc')))
+do(pf.h.ortho_ray(0, (c[x_dict[0]], c[y_dict[0]])))
+do(pf.h.slice(0, c[0]))
+do(pf.h.disk([0.5,0.5,0.5], [0.1, 0.3, 0.6], (10.0,'kpc'), (1.0,'kpc')))
+do(pf.h.cutting([0.1, 0.3, 0.6], [0.5,0.5,0.5]))
+do(pf.h.all_data())
+
+# Gather float (fcoords) and integer (icoords) coordinates chunk-by-chunk
+# for each chunking style; after concatenation and sorting, all three
+# styles should describe the same set of cells.
+sp = pf.h.sphere(c, (10.0, 'kpc'))
+coords = {'f':{}, 'i':{}}
+for t in ["io", "all", "spatial"]:
+    coords['i'][t] = []
+    coords['f'][t] = []
+    for chunk in sp.chunks(None, t):
+        coords['f'][t].append(chunk.fcoords[:,:])
+        coords['i'][t].append(chunk.icoords[:,:])
+    coords['f'][t] = na.concatenate(coords['f'][t])
+    coords['i'][t] = na.concatenate(coords['i'][t])
+    coords['i'][t].sort()
+    coords['f'][t].sort()
+
+print
+print "="*30
+for t, ic in sorted(coords['f'].items()):
+    print "% 10s % 5.7f % 5.7f % 5.7f" % (t, ic.min(), ic.max(), ic.sum())
+for t, ic in sorted(coords['i'].items()):
+    print "% 10s % 15i % 15i % 15i" % (t, ic.min(), ic.max(), ic.sum())
+
+print "="*30
+# Size equality across chunking styles is the actual pass/fail criterion.
+assert(coords['i']["io"].size == coords['i']["all"].size)
+assert(coords['i']["io"].size == coords['i']["spatial"].size)
+assert(coords['f']["io"].size == coords['f']["all"].size)
+assert(coords['f']["io"].size == coords['f']["spatial"].size)
+print "Assertions passed."

File bittwiddle.py

+# Scratch script for inspecting the bit layout of two integer indices
+# (presumably Morton/Hilbert-style interleaved indices -- not confirmed
+# from this file alone).  For each value it extracts the low 27 bits,
+# renders them MSB-first, and splits them into three strings taking every
+# third bit (offsets 0/1/2), printing them against "abc" alignment rulers.
+import itertools
+
+i = 105231
+h = 555358
+h = 1146360
+
+#h, i = 1146365, 105231
+#h, i = 555355, 105231
+#h, i = 1790047, 105231
+#i -= 1
+#h += 1
+
+vals = []
+bywise = []
+flippy = []
+for var in [i, h]:
+    a = []
+    tot = 0
+    # Collect bits 0..26 (LSB first), counting the set bits in `tot`.
+    for j in range(27):
+        v = ((var >> j) & 1)
+        tot += v
+        a.append(str(v))
+    # Reverse so the string reads MSB-first.
+    vals.append("".join(a[::-1]))
+    # Every 3rd bit starting at offset 0, 1, 2 -- one string per axis if
+    # the bits are interleaved in groups of three.
+    bywise.append( [vals[-1][0::3],
+                    vals[-1][1::3],
+                    vals[-1][2::3]] )
+    flippy.append("".join(itertools.chain(bywise[-1][::-1])))
+    print
+    print "".join(['abc']*9)
+    print vals[-1], tot
+    print "".join(['a']*9 + ['b']*9 + ['c']*9)
+    print
+

File cell_mass.py

+# Print the CellMassMsun and x fields of a 100 pc sphere centered at the
+# domain center ("c") -- a minimal read test for derived/coordinate fields.
+from yt.mods import *
+pf = load("DD0039/output_0039")
+sp = pf.h.sphere("c", (100.0, 'pc'))
+print sp["CellMassMsun"]
+print sp["x"]

File check_all.py

+# Same smoke test as all_functionality.py: exercise the main data objects
+# with a mixed field list, then compare coordinates across the "io"/"all"/
+# "spatial" chunking strategies.
+from yt.mods import *
+pf = load("JHK-DD0030/galaxy0030")
+
+#v, c = pf.h.find_max("Density")
+c = [0.5, 0.5, 0.5]
+
+fields = ["dm_density",
+          "particle_type", "CellMassMsun", "x", "Density", "Ones", "Entropy",
+          "particle_position_x", "particle_velocity_x", "ParticleMassMsun",
+         ]
+
+def do(obj):
+    # Read each field, print its size, and check tuple-vs-bare field access
+    # agree for "Density".
+    print "GOT", obj._type_name
+    for field in fields:
+        ff = obj[field]
+        print "% 25s % 15i" % (field, ff.size)
+    assert na.all(obj["Density"] == obj["gas","Density"])
+    print
+
+do(pf.h.sphere([0.5,0.5,0.5], (10.0, 'kpc')))
+do(pf.h.ortho_ray(0, (c[x_dict[0]], c[y_dict[0]])))
+do(pf.h.slice(0, c[0]))
+do(pf.h.disk([0.5,0.5,0.5], [0.1, 0.3, 0.6], (10.0,'kpc'), (1.0,'kpc')))
+do(pf.h.cutting([0.1, 0.3, 0.6], [0.5,0.5,0.5]))
+do(pf.h.all_data())
+
+sp = pf.h.sphere(c, (10.0, 'kpc'))
+coords = {'f':{}, 'i':{}}
+for t in ["io", "all", "spatial"]:
+    coords['i'][t] = []
+    coords['f'][t] = []
+    for chunk in sp.chunks(None, t):
+        coords['f'][t].append(chunk.fcoords[:,:])
+        coords['i'][t].append(chunk.icoords[:,:])
+    coords['f'][t] = na.concatenate(coords['f'][t])
+    coords['i'][t] = na.concatenate(coords['i'][t])
+    coords['i'][t].sort()
+    coords['f'][t].sort()
+
+print
+print "="*30
+for t, ic in sorted(coords['f'].items()):
+    print "% 10s % 5.7f % 5.7f % 5.7f" % (t, ic.min(), ic.max(), ic.sum())
+for t, ic in sorted(coords['i'].items()):
+    print "% 10s % 15i % 15i % 15i" % (t, ic.min(), ic.max(), ic.sum())
+# NOTE(review): a second script appears to begin here (fresh imports and a
+# new load) -- the page rendering seems to concatenate several files under
+# one heading; verify against the actual changeset.
+#
+# This part compares four ways of reading the same three fields from a
+# sphere -- all-at-once vs. three chunked strategies -- recording (size,
+# sum) per field plus wall time for each approach.
+from yt.mods import *
+
+pf = load("DD0039/output_0039")
+sp = pf.h.sphere('c', (1.0, 'kpc'))
+print "Density"
+all = {}
+t1 = time.time()
+for field in ['Density', 'Entropy', 'Ones']:
+    f = sp[field]
+    all[field] = (f.size, f.sum(dtype='float64'))
+all["time"] = time.time() - t1
+
+# "grid" chunking: one chunks() pass per field.
+sp = pf.h.sphere('c', (1.0, 'kpc'))
+grid = {}
+t1 = time.time()
+for field in ['Density', 'Entropy', 'Ones']:
+    grid[field] = (0, 0.0)
+    for rv in sp.chunks(field, "grid"):
+        s, v = grid[field]
+        grid[field] = (rv[field].size + s,
+                        rv[field].sum(dtype="float64") + v)
+grid["time"] = time.time() - t1
+
+# "grids" chunking: same shape of loop, different chunk style name.
+sp = pf.h.sphere('c', (1.0, 'kpc'))
+grids = {}
+t1 = time.time()
+for field in ['Density', 'Entropy', 'Ones']:
+    grids[field] = (0, 0.0)
+    for rv in sp.chunks(field, "grids"):
+        s, v = grids[field]
+        grids[field] = (rv[field].size + s,
+                        rv[field].sum(dtype="float64") + v)
+grids["time"] = time.time() - t1
+
+# Inner-field-read: one chunks() pass, reading all three fields per chunk.
+# NOTE(review): the chunks() call uses `field`, which at this point is the
+# leftover loop variable ('Ones') from the initialization loop above --
+# possibly intentional for this experiment, but worth confirming.
+sp = pf.h.sphere('c', (1.0, 'kpc'))
+inner = {}
+t1 = time.time()
+for field in ['Density', 'Entropy', 'Ones']:
+    inner[field] = (0, 0.0)
+for rv in sp.chunks(field, "grids"):
+    for field in ['Density', 'Entropy', 'Ones']:
+        s, v = inner[field]
+        inner[field] = (rv[field].size + s,
+                        rv[field].sum(dtype="float64") + v)
+inner["time"] = time.time() - t1
+print "All-at-once took %0.3e" % (all["time"])
+
+# Each report prints per-field (size difference, sum ratio) against the
+# all-at-once baseline -- ideally 0 and 1.0 respectively.
+print "Grid-by-grid chunking:"
+for field in ['Density', 'Entropy', 'Ones']:
+    print grid[field][0] - all[field][0],
+    print grid[field][1] / all[field][1]
+print "Took %0.3es" % (grid["time"])
+
+print "Grids-by-grids chunking:"
+for field in ['Density', 'Entropy', 'Ones']:
+    print grids[field][0] - all[field][0],
+    print grids[field][1] / all[field][1]
+print "Took %0.3es" % (grids["time"])
+
+print "Inner-field-read chunking:"
+for field in ['Density', 'Entropy', 'Ones']:
+    print inner[field][0] - all[field][0],
+    print inner[field][1] / all[field][1]
+print "Took %0.3es" % (inner["time"])
+# NOTE(review): a third script appears to begin here: it parses "STATS"
+# lines from two captured output files (new vs. old yt) and prints the
+# timing and value ratios between them.
+import pylab
+
+def gv(fn, dd):
+    # Parse lines of the form emitted by compare_data_objects.py
+    # ("STATS: <name> took <t> with <sum>, <shape>") into dd[name] =
+    # (time, sum, first shape element).
+    for line in open(fn):
+        if line.startswith("STATS"):
+            vals = line.split()
+            dd[vals[1]] = (float(vals[3]), float(vals[5][:-1]),
+                           float(vals[6][1:-2]))
+
+old = {}
+new = {}
+
+gv("ytrf.out", new)
+#gv("ytnrf.out", old)
+gv("ytrft.out", old)
+
+for k in new:
+    nv = new[k]
+    ov = old[k]
+    print "% 15s:  %0.3es %0.3f(n/o) %0.3e delta % 10i delta cells" % (
+        k, nv[0], nv[0]/ov[0], nv[1]/ov[1], nv[2]-ov[2])

File compare_data_objects.py

+# ( ppbr yt-refactor ; python2.7 compare_data_objects.py  ; ((python2.7
+# compare_data_objects.py 2>&1) | tee ytrf.out) ) ; ( ppbr yt ; python2.7
+# compare_data_objects.py  ; ((python2.7 compare_data_objects.py 2>&1) | tee
+# ytnrf.out) ) ;
+#
+# Profile a "Density" read on each major data object, dumping a cProfile
+# file per object, tagged "ytrf" or "yt" depending on which branch is on
+# sys.path.  The STATS lines it prints are what gv() in check_all.py parses.
+from yt.mods import *
+import time
+import cProfile
+
+pf = load("DD0039/output_0039")
+v, c = pf.h.find_max("Density")
+#c = [0.5, 0.5, 0.5]
+
+# Detect which checkout is active from sys.path to name the profile files.
+import sys
+PROF_NAME="yt"
+for p in sys.path:
+    if "yt-refactor" in p:
+        PROF_NAME="ytrf"
+        break
+
+def cprof(obj):
+    # Clear cached grid data and the object's own field cache so the
+    # profiled read is cold, then profile obj["Density"] and dump stats.
+    fn = "%s-%s.cprof" % (PROF_NAME, obj._type_name)
+    prof = cProfile.Profile()
+    for g in obj.pf.h.grids: g.clear_data()
+    obj.field_data.clear()
+    obj._grids = None
+    t1 = time.time()
+    prof.enable()
+    dd = obj["Density"]
+    prof.disable()
+    t2 = time.time()
+    print "STATS: %s took %0.3e with %s, %s" % (
+        obj._type_name, t2-t1, dd.sum(dtype="float64"), dd.shape)
+    prof.dump_stats(fn)
+
+cprof(pf.h.ortho_ray(0, (c[x_dict[0]], c[y_dict[0]])))
+cprof(pf.h.slice(0, c[0]))
+cprof(pf.h.sphere([0.5,0.5,0.5], 0.10))
+cprof(pf.h.all_data())
+cprof(pf.h.disk([0.5,0.5,0.5], [0.1, 0.3, 0.6], 0.1, 0.03))
+cprof(pf.h.cutting([0.1, 0.3, 0.6], [0.5,0.5,0.5]))
+# NOTE(review): additional small scripts appear below (fresh imports); the
+# page rendering seems to concatenate several files under this heading.
+#
+# Cutting-plane "Density" read timing.
+from yt.mods import *
+import time
+
+pf = load("JHK-DD0030/galaxy0030")
+dd = pf.h.cutting([0.1, 0.3, 0.6], [0.5,0.5,0.5])
+t1 = time.time()
+data = dd["Density"]
+t2 = time.time()
+print data.sum(dtype='float64'), data.shape
+print "%0.3e" % (t2-t1)
+# Print three fields from a 100 pc sphere.
+from yt.mods import *
+
+pf = load("DD0039/output_0039")
+sp = pf.h.sphere('c', (100.0, 'pc'))
+print "Density"
+print sp["Density"]
+print "Ones"
+print sp["Ones"]
+print "Entropy"
+print sp["Entropy"]
+# Disk-object "Density" read timing.
+from yt.mods import *
+import time
+
+pf = load("JHK-DD0030/galaxy0030")
+dd = pf.h.disk([0.5,0.5,0.5], [0.1, 0.3, 0.6], 0.1, 0.03)
+t1 = time.time()
+data = dd["Density"]
+t2 = time.time()
+print data.sum(dtype='float64'), data.shape
+print "%0.3e" % (t2-t1)
+# Profile find_max("Density") under MPI, one cProfile dump per rank.
+from yt.mods import *
+import time
+import cProfile
+from mpi4py import MPI
+
+pf = load("DD0039/output_0039")
+pf.h
+t1 = time.time()
+fn = "fmax-%s.cprof" % (MPI.COMM_WORLD.rank)
+prof = cProfile.Profile()
+prof.enable()
+v, c = pf.h.find_max("Density")
+prof.disable()
+t2 = time.time()
+prof.dump_stats(fn)
+
+print "Took %0.3e" % (t2-t1)

File get_coords.py

+# Collect fcoords for a sphere under each chunking strategy and print
+# min/max/sum per strategy -- the three rows should agree if chunking is
+# consistent.
+from yt.mods import *
+pf = load("DD0039/output_0039")
+#dd = pf.h.all_data()
+dd = pf.h.sphere("max", (100.0, 'pc'))
+icoords = {}
+for t in ["io", "all", "spatial"]:
+    icoords[t] = []
+    for chunk in dd.chunks(None, t):
+        icoords[t].append(chunk._current_chunk.fcoords(1))
+    icoords[t] = na.concatenate(icoords[t])
+    icoords[t].sort()
+
+for t, ic in sorted(icoords.items()):
+    #print "% 10s % 15i % 15i % 15i" % (t, ic.min(), ic.max(), ic.sum())
+    print "% 10s % 5.7f % 5.7f % 5.7f" % (t, ic.min(), ic.max(), ic.sum())

File join_proj.py

+# Merge per-processor quadtree projection dumps (HDF5) into a single
+# quadtree per cycle/axis, then flatten each merged tree into px/py/pdx/pdy
+# + field arrays and write them to an output HDF5 file.  Cycles are
+# distributed across MPI tasks via parallel_objects.
+from yt.mods import *
+from yt.utilities.amr_utils import QuadTree, merge_quadtrees
+import glob
+import h5py
+import time
+
+# Minimal stand-ins for a parameter file and projection object, carrying
+# just the attributes the FixedResolutionBuffer path needs.
+class PFMock(dict):
+    domain_left_edge = na.zeros(3, dtype='float64')
+    domain_right_edge = na.ones(3, dtype='float64')
+pf = PFMock()
+class ProjMock(dict):
+    pass
+
+NN = 256
+field_lists = dict(
+    unweighted = ["Density", "HII_Density", "HeI_Density",
+                  "Metal_Density", "Ones"],
+    weighted   = ["Density", "Temperature", "HII_Fraction",
+                  "H2I_Fraction", "Metal_Density"]
+)
+
+fspec = r"/scratch/bwoshea/testrun_nest_inline/cycle%08i/%s_*.h5"
+cycles = [int(os.path.basename(f)[-8:]) for f in 
+          glob.glob("/scratch/bwoshea/testrun_nest_inline/cycle*")]
+cycles.sort()
+
+for cycle in parallel_objects(cycles, -1):
+    for t in ["weighted", "unweighted"]:
+        fields = field_lists[t]
+        projs = []
+        output = h5py.File("j_%08i_%s.h5" % (cycle, t), "w")
+
+        for ax in range(3):
+            # Accumulator tree; each per-processor file's tree is merged in.
+            qt_w = QuadTree(na.ones(2, "int64") * NN, 5)
+            print "Axis", ax
+            t1 = time.time()
+            fns = glob.glob(fspec % (cycle, t))
+            pb = get_pbar("Cycle %08i Axis %s (%s) " % (cycle, ax, t), len(fns))
+            for i, fn in enumerate(sorted(fns)):
+                pb.update(i)
+                f = h5py.File(fn, "r")
+                qt_a = QuadTree(na.ones(2, "int64") * NN, 5)
+                qt_a.frombuffer(f["/axis_%02i/refined" % ax][:],
+                                f["/axis_%02i/values" % ax][:],
+                                f["/axis_%02i/weight" % ax][:], "integrate")
+                merge_quadtrees(qt_w, qt_a)
+                attrs = f["/attributes"].attrs.items()
+                f.close()
+            pb.finish()
+            # Now we construct our dictionary of values
+            coord_data, field_data, weight_data, dxs = [], [], [], []
+            level = 0
+            # Walk levels until the tree runs out, normalizing values by
+            # the accumulated weights at each level.
+            while 1:
+                npos, nvals, nwvals = qt_w.get_all_from_level(level)
+                if len(npos) == 0: break
+                coord_data.append(npos)
+                nvals /= nwvals[:,None]
+                field_data.append(nvals)
+                weight_data.append(nwvals)
+                dxs.append(1.0/(NN*2.0**level) * na.ones(nvals.shape[0], dtype="float64"))
+                level += 1
+            coord_data = na.concatenate(coord_data, axis=0).transpose()
+            field_data = na.concatenate(field_data, axis=0).transpose()
+            weight_data = na.concatenate(weight_data, axis=0).transpose()
+            dxs = na.concatenate(dxs, axis=0).transpose()
+            # Build a projection-like dict: pixel centers and half-widths.
+            proj = ProjMock()
+            proj.pf = pf
+            proj['px'] = (coord_data[0,:] + 0.5) * dxs
+            proj['py'] = (coord_data[1,:] + 0.5) * dxs
+            proj['pdx'] = dxs / 2.0
+            proj['pdy'] = dxs / 2.0
+            proj['weight_field'] = weight_data
+            for field, arr in zip(fields, field_data):
+                proj[field] = arr
+            projs.append(proj)
+            proj.axis = ax
+            output.create_group("/axis_%02i" % ax)
+            for v, ds in proj.items():
+                output.create_dataset("/axis_%02i/%s" % (ax, v), data=ds)
+            print "Took %0.3e, %s" % (time.time() - t1, proj.keys())
+            #frb = FixedResolutionBuffer(proj, (0.0, 1.0, 0.0, 1.0), (4096, 4096))
+            #write_image(na.log10(frb["Density"]), "hi_%02i.png" % ax)
+
+        # Copy the attributes of the last-read input file into the output.
+        attr = output.create_group("/attributes")
+        for k, v in attrs:
+            attr.attrs[k] = v
+
+        output.close()
+# NOTE(review): a second, unrelated script appears below (fresh imports) --
+# a timing of a RAMSES projection; likely a separate file merged by the
+# page rendering.
+from yt.mods import *
+import time
+t1 = time.time()
+pf = load("output_00045/info_00045.txt")
+pf.h
+pf.h.proj(0, "Ones")
+t2 = time.time()
+print "Took %0.3e" % (t2-t1)

File ortho_ray.py

+# Time a "Density" read along an orthogonal ray through the density maximum.
+from yt.mods import *
+import time
+
+pf = load("DD0039/output_0039")
+v, c = pf.h.find_max("Density")
+dd = pf.h.ortho_ray(0, (c[x_dict[0]], c[y_dict[0]]))
+t1 = time.time()
+data = dd["Density"]
+t2 = time.time()
+print data.sum(dtype='float64'), data.shape
+print "%0.3e" % (t2-t1)
+# NOTE(review): separate two-line projection script appended below.
+from yt.mods import *
+pf = load("JHK-DD0030/galaxy0030")
+
+proj = pf.h.proj(0,"Density")

File plot_slice.py

+# Time creating and saving a slice plot plus a cutting-plane plot through
+# the domain center.
+import time
+t1 = time.time()
+from yt.mods import *
+pf = load("DD0039/output_0039")
+pc = PlotCollection(pf, 'c')
+pc.add_slice("Density", 0)
+pc.add_cutting_plane("Density", [0.1, 0.2, 0.3])
+pc.save()
+t2 = time.time()
+print "Took %0.3e" % (t2-t1)
+# NOTE(review): further scripts appear below (fresh imports each time);
+# the page rendering seems to concatenate several files here.
+#
+# Radial profile of Density/Temperature in a 1 kpc sphere.
+from yt.mods import *
+
+pf = load("DD0039/output_0039")
+pc = PlotCollection(pf, 'c')
+pc.add_profile_sphere(1.0, 'kpc', ["Density", "Temperature"])
+pc.save()
+# Time a weighted projection after deleting any cached .yt file so the
+# hierarchy is rebuilt cold.
+from yt.mods import *
+import os
+if os.path.exists("DD0039/output_0039.yt"):
+    os.unlink("DD0039/output_0039.yt")
+    print "Deleted old .yt file"
+t1 = time.time()
+pf = load("DD0039/output_0039")
+proj = pf.h.proj(0, "Ones", weight="Ones")
+t2 = time.time()
+
+print "Took %0.3e" % (t2-t1)
+# Hand-rolled projection of a RAMSES dataset via QuadTree: feed icoords +
+# per-level-scaled Density into the tree chunk by chunk, track per-level
+# extrema, then flatten all levels into a proj-like dict and render it
+# through a FixedResolutionBuffer.
+from yt.mods import *
+from yt.utilities.amr_utils import QuadTree
+import time
+t1 = time.time()
+#pf = load("DD0039/output_0039")
+#pf = load("output_00045/info_00045.txt")
+pf = load("output_00700/info_00700.txt")
+#pf = load("output_00007/info_00007.txt")
+axis = 0
+xax = x_dict[axis]
+yax = y_dict[axis]
+dd = pf.h.all_data()
+# NOTE(review): domain_dimensions is forcibly overwritten to 3 here --
+# presumably an experiment-specific hack; confirm before reuse.
+pf.domain_dimensions[:] = 3
+tree = QuadTree(na.array([pf.domain_dimensions[xax],
+                          pf.domain_dimensions[yax]],
+                          dtype="int64"), nvals=1)
+
+class ProjMock(dict):
+    pass
+
+level_extrema = defaultdict(lambda: (1e30, -1e30))
+for i,chunk in enumerate(dd.chunks(["Density"], "io")):
+    print i, chunk
+    pxs = chunk.icoords[:,xax]
+    pys = chunk.icoords[:,yax]
+    ires = level = chunk.ires
+    pvals = chunk["Density"].astype("float64").reshape((chunk.size, 1))
+    opvals = pvals.copy()
+    # Scale values by cell width (1/2^(level+1)) before adding to the tree.
+    pvals /= (2**(level+1))[:,None]
+    wvals = chunk["Density"].astype("float64")
+    #pvals *= wvals[:,None]
+    tree.add_chunk_to_tree(pxs, pys, level, pvals, wvals)
+    # Track unscaled per-level min/max for the diagnostic printout.
+    for l in na.unique(level):
+        li = (level == l)
+        level_extrema[l] = (min(opvals[li].min(), level_extrema[l][0]),
+                            max(opvals[li].max(), level_extrema[l][1]))
+for l, (mi, ma) in sorted(level_extrema.items()):
+    print "% 2i: %0.3e %0.3e" % (l, mi, ma)
+t2 = time.time()
+
+print "Took %0.3e" % (t2-t1)
+
+# Flatten the quadtree level by level into coordinate/field/weight arrays.
+coord_data, field_data, weight_data, dxs = [], [], [], []
+level = 0
+for level in range(pf.max_level + 1):
+    npos, nvals, nwvals = tree.get_all_from_level(level)
+    coord_data.append(npos)
+    #nvals /= nwvals[:,None]
+    field_data.append(nvals)
+    weight_data.append(nwvals)
+    dxs.append(1.0/(2.0**(level+1)) * na.ones(nvals.shape[0], dtype="float64"))
+    if nvals.shape[0] > 0:
+        print "% 2i: %0.3e %0.3e" % (
+            level, field_data[-1].min(), field_data[-1].max())
+    level += 1
+coord_data = na.concatenate(coord_data, axis=0).transpose()
+field_data = na.concatenate(field_data, axis=0).transpose()
+weight_data = na.concatenate(weight_data, axis=0).transpose()
+dxs = na.concatenate(dxs, axis=0).transpose()
+proj = ProjMock()
+proj.pf = pf
+proj['px'] = (coord_data[0,:] + 0.5) * dxs
+proj['py'] = (coord_data[1,:] + 0.5) * dxs
+proj['pdx'] = dxs / 2.0
+proj['pdy'] = dxs / 2.0
+proj['weight_field'] = weight_data
+fields = ["Density"]
+for field, arr in zip(fields, field_data):
+    proj[field] = arr
+proj.axis = 0
+frb = FixedResolutionBuffer(proj, (0.0, 1.0, 0.0, 1.0), (1024, 1024))
+write_image(na.log10(frb["Density"]), "first_light.png")

File quantities.py

+# Minimal check of the derived-quantity interface: Extrema of Density over
+# the whole domain.
+from yt.mods import *
+
+pf = load("DD0039/output_0039")
+dd = pf.h.all_data()
+print dd.quantities["Extrema"]("Density")

File read_ramses_data.py

+# Walk a RAMSES hydro file with the Fortran-record utilities: skip the
+# 6-record header, read the per-file ilevel/ncache attributes, then dump
+# min/max of every double vector, looping levels x 8 cells x 6 variables.
+import yt.utilities.fortran_utils as fpu
+import numpy as na
+from yt.mods import *
+
+pf = load("output_00007/info_00007.txt")
+f = open("output_00007/hydro_00007.out00001", "rb")
+fpu.skip(f, 6) # header
+header = ( ('file_ilevel', 1, 'I'),
+           ('file_ncache', 1, 'I') )
+hvals = fpu.read_attrs(f, header)
+print hvals
+
+for i in range(pf['levelmax']):
+    for n in range(8):
+        for j in range(6):
+            vec = fpu.read_vector(f, "d")
+            print i, n, j, vec.min(), vec.max()

File read_ramses_header.py

+# Parse a RAMSES AMR file header and per-level grid records, then compare
+# the grid positions against yt's Hilbert-curve routines and plot both
+# orderings in 3D for visual comparison.
+import struct
+import numpy as na
+import pprint
+import os
+from yt.utilities._amr_utils.geometry_utils import \
+    get_hilbert_points, \
+    get_hilbert_indices
+from yt.utilities.fortran_utils import read_attrs, read_vector, skip
+
+hvals = {}
+
+f1 = open("output_00007/amr_00007.out00001")
+# Fixed-layout header records: (name, count, struct format char).
+header = ( ('ncpu', 1, 'i'),
+           ('ndim', 1, 'i'),
+           ('nx', 3, 'i'),
+           ('nlevelmax', 1, 'i'),
+           ('ngridmax', 1, 'i'),
+           ('nboundary', 1, 'i'),
+           ('ngrid_current', 1, 'i'),
+           ('boxlen', 1, 'd'),
+           ('nout', 3, 'I')
+          )
+
+hvals.update(read_attrs(f1, header))
+
+noutput, iout, ifout = hvals['nout']
+
+# Second header section; several record lengths depend on values read above.
+next_set = ( ('tout', noutput, 'd'),
+             ('aout', noutput, 'd'),
+             ('t', 1, 'd'),
+             ('dtold', hvals['nlevelmax'], 'd'),
+             ('dtnew', hvals['nlevelmax'], 'd'),
+             ('nstep',  2, 'i'),
+             ('stat', 3, 'd'),
+             ('cosm', 7, 'd'),
+             ('timing', 5, 'd'),
+             ('mass_sph', 1, 'd') )
+
+hvals.update(read_attrs(f1, next_set))
+
+# Linked-list tree bookkeeping: head/tail/count of grids per level.
+tree_header = ( ('headl', hvals['nlevelmax'], 'i'),
+                ('taill', hvals['nlevelmax'], 'i'),
+                ('numbl', hvals['nlevelmax'], 'i'),
+              )
+hvals.update(read_attrs(f1, tree_header))
+skip(f1)
+
+for i in range(hvals['nboundary']):
+    skip(f1, 2) # headb, tailb
+    skip(f1) # grid boundary
+
+next_set = ( ('free_mem', 5, 'i'), )
+hvals.update(read_attrs(f1, next_set))
+
+hvals['ordering'] = "".join(read_vector(f1, 'c')).strip()
+skip(f1, 4) 
+
+# Remember where the per-level data begins and find the file end so the
+# level loop can stop cleanly.
+pointers = {}
+begin = f1.tell()
+f1.seek(0, os.SEEK_END)
+end = f1.tell()
+f1.seek(begin)
+for ilvl in range(hvals['nlevelmax']):
+    cur = f1.tell()
+    if cur == end: break
+    print "HANDLING LEVEL %s, %s from end" % (ilvl, end - cur)
+    vv = pointers[ilvl] = {}
+    ng = hvals['numbl'][ilvl]
+    ind = na.empty(ng, dtype='int64')
+    ind[:] = read_vector(f1, "I")
+    vv['ind'] = ind 
+    # Skip vv to next / prev
+    skip(f1, 2)
+    # Let's break this down.  Oliver skips 3+3+6+8+8+8
+    # I think this is:
+    #   center (N,3) double
+    #   parent indices (N) int
+    #   neighbor indices (N, 8) int
+    #   child indices (N, 8) int
+    #   cpu_map (N, 8) int
+    pos = na.empty((ng, 3), dtype='float64')
+    pos[:,0] = read_vector(f1, "d") # center_x
+    pos[:,1] = read_vector(f1, "d") # center_y
+    pos[:,2] = read_vector(f1, "d") # center_z
+    vv['pos'] = pos
+    vv['parents'] = read_vector(f1, "I") # parents
+    skip(f1, 6) # neighbors
+    #for i in range(6):
+    #    vv.append(read_vector(f1, "I")) # neighbor
+    children = na.empty((ng, 8), dtype='int64')
+    for i in range(8):
+        children[:,i] = read_vector(f1, "I") # children
+    vv['children'] = children
+    skip(f1, 8) # cpu map
+    #for i in range(8):
+    #    vv.append(read_vector(f1, "I")) # cpu map
+    rmap = na.empty((ng, 8), dtype='int64')
+    for i in range(8):
+      rmap[:,i] = read_vector(f1, "I") # refinement map
+    vv['rmap'] = rmap
+
+cur = f1.tell()
+print "Ended %s bytes from end" % (end - cur)
+#pprint.pprint(hvals)
+
+# Let's compare our hilbert curves
+for ilvl in range(hvals['nlevelmax']):
+    if ilvl not in pointers: break
+    vv = pointers[ilvl]
+    ind = vv['ind']
+    dx = 2.0**-(ilvl+1)
+    # Shift centers to lower-left corners, convert to integer lattice
+    # coordinates, and compute yt's Hilbert indices for them.
+    pos = vv['pos'] - dx
+    hind = get_hilbert_indices(ilvl, (pos/dx - 0.5).astype("int64"))
+    hpos = get_hilbert_points(ilvl, na.arange(ind.size).astype("int64"))
+    hpos = hpos.astype("float64") * dx * 2
+    assert(na.unique(hind).size == hind.size)
+    # Fraction of consecutive pairs that are increasing along the curve.
+    print (hind[1:] - hind[:-1] > 0).sum() / float(hind.size)
+    if ilvl == 1: break
+
+import matplotlib;matplotlib.use("Agg")
+import matplotlib.pyplot as plt
+from mpl_toolkits.mplot3d import Axes3D
+fig = plt.figure()
+
+plt.clf()
+ax = fig.add_subplot(111, projection='3d')
+ax.plot(hpos[:,0], hpos[:,1], hpos[:,2], '-x')
+ax.set_xlim(0,1)
+ax.set_ylim(0,1)
+ax.set_zlim(0,1)
+ax.set_title("From yt")
+plt.savefig("hind_yt.png")
+
+plt.clf()
+ax = fig.add_subplot(111, projection='3d')
+ax.plot(pos[:,0], pos[:,1], pos[:,2], '-x')
+ax.set_xlim(0,1)
+ax.set_ylim(0,1)
+ax.set_zlim(0,1)
+ax.set_title("From RAMSES")
+plt.savefig("hind_ramses.png")
+
+print hind[0], ind[0]
+# NOTE(review): a separate slice-timing script appears below (fresh
+# imports); likely merged by the page rendering.
+from yt.mods import *
+import time
+
+pf = load("DD0039/output_0039")
+v, c = pf.h.find_max("Density")
+dd = pf.h.slice(0, c[0])
+t1 = time.time()
+data = dd["Density"]
+t2 = time.time()
+print "Took1:", data.sum(dtype='float64'), data.shape
+print "Took2: %0.3e" % (t2-t1)

File slice_coords.py

+# Read a slice's pixelization fields (px/py/pdx/pdy) alongside Density,
+# print their extrema, then verify under spatial chunking that px and
+# Density stay the same size.
+from yt.mods import *
+import time
+
+pf = load("DD0039/output_0039")
+#v, c = pf.h.find_max("Density")
+c = [0.5, 0.5, 0.5]
+dd = pf.h.slice(0, c[0])
+data = dd["Density"]
+px = dd["px"]
+py = dd["py"]
+pdx = dd["pdx"]
+pdy = dd["pdy"]
+
+for arr in [data, px, py, pdx, pdy]:
+    print arr.min(), arr.max(), arr.size
+
+# NOTE(review): the loop body reads from `dd`, not `chunk` -- so every
+# iteration prints/asserts the same full-slice arrays; confirm whether
+# per-chunk access was intended.
+for i,chunk in enumerate(dd.chunks(None, 'spatial')):
+    print "    ", i, dd['px'].min(), dd['px'].max(), dd['px'].size, dd['Density'].size
+    assert(dd['px'].size == dd['Density'].size)

File slice_plot.py

+# Stub plot script.
+# NOTE(review): the final line accesses pc.add_slice without calling it,
+# so no slice is actually added -- looks truncated or intentionally inert.
+from yt.mods import *
+
+pf = load("DD0039/output_0039")
+pc = PlotCollection(pf, 'c')
+pc.add_slice

File spatial_fields.py

+# Read a spatially-derived field (DivV) from a small sphere and print a
+# STATS fingerprint (max, size, sum).
+from yt.mods import *
+
+pf = load("DD0039/output_0039")
+sp = pf.h.sphere("c", (10.0, 'pc'))
+print "STATS", sp["DivV"].max(), sp["DivV"].size, sp["DivV"].sum(dtype="float64")
+# NOTE(review): separate sphere-timing script appended below (fresh
+# imports); likely merged by the page rendering.
+import time
+from yt.mods import *
+#pf = load("tests/DD0010/moving7_0010")
+pf = load("/home/mturk/data/DD0039/output_0039")
+sp = pf.h.sphere([0.5,0.5,0.5], 0.10)
+t1 = time.time()
+data = sp["Density"]
+print "Returned"
+print data.sum(dtype='float64'), data.shape, data.min(), data.max(), data.std(), (data==0).sum()
+t2 = time.time()
+print "%0.3e" % (t2-t1)

File sphere_grid.py

+# Cross-check the new SphereSelector against the old YTSphereBase path:
+# compare which grids each selects and, grid by grid, how many cells each
+# counts inside the same sphere.
+import time
+from yt.mods import *
+from yt.geometry.selection_routines import SphereSelector
+from yt.data_objects.selection_data_containers import YTSphereBase
+
+# Bare object carrying only the attributes SphereSelector reads.
+class Dummy(object):
+    radius = 0.1
+    center = [0.5, 0.5, 0.5]
+
+ii = Dummy()
+
+
+sps = SphereSelector(ii)
+pf = load("DD0039/output_0039")
+
+spp = YTSphereBase([0.5, 0.5, 0.5], 0.1, pf=pf)
+spp._get_list_of_grids()
+
+# Grid-level selection: boolean mask over all grids vs. old grid list.
+gi = sps.select_grids(pf.h.grid_left_edge, pf.h.grid_right_edge)
+print gi.sum(),
+print gi.sum() / float(len(spp._grids))
+
+# Cell-level counts per selected grid, new selector vs. old point indices.
+pb = get_pbar("Counting", gi.sum())
+scount = icount = 0
+for i,g in enumerate(pf.h.grids[gi]):
+    pb.update(i)
+    scount += sps.count_cells(g)
+    icount += spp._get_point_indices(g)[0].size
+pb.finish()
+print scount, icount

File test_handler.py

+# Benchmark octree selection on a RAMSES dataset: time SphereSelector's
+# select_octs, the oct handler's count() in both split modes, base-chunk
+# identification, and coordinate generation for the first chunk.
+from yt.mods import *
+from yt.geometry.selection_routines import \
+    SphereSelector
+
+#pf = load("output_00700/info_00700.txt")
+#pf = load("output_00007/info_00007.txt")
+pf = load("output_00045/info_00045.txt")
+pf.h
+
+# Bare object carrying only the attributes SphereSelector reads.
+class SphereMock(object):
+    center = [0.5, 0.5, 0.5]
+    radius = 0.1
+
+N = 1
+ss = SphereMock()
+sobj = SphereSelector(ss)
+t1 = time.time()
+for i in range(N):
+    mask = sobj.select_octs(pf.h.oct_handler)
+t2 = time.time()
+print mask.sum(), mask.size
+print "Percent", mask.sum() / float(mask.size)
+print "%0.3e %0.3e" % (t2-t1, (t2-t1)/float(N))
+
+# Count selected octs, averaged over 10 runs, with and without split.
+t1 = time.time()
+for i in range(10):
+    count = pf.h.oct_handler.count(mask, split = False)
+t2 = time.time()
+print "To count: %0.3e" % ((t2-t1)/10)
+#inds = pf.h.oct_handler.indices(mask)
+t1 = time.time()
+for i in range(10):
+    count = pf.h.oct_handler.count(mask, split = True)
+t2 = time.time()
+print "To select: %0.3e" % ((t2-t1)/10)
+
+dd = pf.h.all_data()
+t1 = time.time()
+pf.h._identify_base_chunk(dd)
+t2 = time.time()
+print "To count and whatnot %0.3e" % (t2-t1)
+ii = pf.h._chunk_all(dd)
+chunk = ii.next()
+
+t1 = time.time()
+ic = chunk.icoords
+t2 = time.time()
+print "To coordinatize: %0.3e" % (t2-t1)

File test_hilbert.py

+# Round-trip test of the Hilbert curve routines: random lattice points ->
+# indices -> points must reproduce the input exactly; also times both
+# directions per point.
+from yt.utilities._amr_utils.geometry_utils import \
+    get_hilbert_points, \
+    get_hilbert_indices
+import numpy as na
+import time
+
+N = 1e8
+order = 15
+rand = (na.random.random((N, 3)) * (2**order)).astype("int64")
+rand[0,:] = 0
+#print rand
+t0 = time.time()
+ind = get_hilbert_indices(order, rand)
+print "START"
+t1 = time.time()
+pos = get_hilbert_points(order, ind)
+t2 = time.time()
+print "END"
+print na.all(pos == rand),
+print pos.min(), pos.max()
+print "Took %0.3es (%0.3e)" % (t1-t0, (t1-t0)/rand.shape[0])
+print "Took %0.3es (%0.3e)" % (t2-t1, (t2-t1)/rand.shape[0])
+# NOTE(review): a second, much larger script appears below (fresh imports)
+# -- a standalone RAMSES AMR reader with its own Fortran-record helpers
+# that fills an OctreeContainer and benchmarks SphereSelector on it.  The
+# page rendering seems to have merged it under this heading.
+from yt.geometry.oct_container import \
+    OctreeContainer
+import struct
+import numpy as na
+import pprint
+import os
+import time
+from yt.utilities._amr_utils.geometry_utils import \
+    get_hilbert_points, \
+    get_hilbert_indices
+from yt.geometry.selection_routines import \
+    SphereSelector
+
+f1 = open("output_00007/amr_00007.out00001")
+header = ( ('ncpu', 1, 'i'),
+           ('ndim', 1, 'i'),
+           ('nx', 3, 'i'),
+           ('nlevelmax', 1, 'i'),
+           ('ngridmax', 1, 'i'),
+           ('nboundary', 1, 'i'),
+           ('ngrid_current', 1, 'i'),
+           ('boxlen', 1, 'd'),
+           ('nout', 3, 'I')
+          )
+
+def read_attrs(f, attrs):
+    # Read a sequence of Fortran records described by (name, count, fmt)
+    # tuples; each record is framed by 4-byte length markers which must
+    # match (s1 == s2), else the layout is wrong and we bail loudly.
+    vv = {}
+    net_format = "="
+    for a, n, t in attrs:
+        net_format += "".join(["I"] + ([t] * n) + ["I"])
+    size = struct.calcsize(net_format)
+    vals = list(struct.unpack(net_format, f.read(size)))
+    vv = {}
+    for a, b, n in attrs:
+        s1 = vals.pop(0)
+        v = [vals.pop(0) for i in range(b)]
+        s2 = vals.pop(0)
+        if s1 != s2:
+            size = struct.calcsize("=I" + "".join(b*[n]) + "I")
+            print "S1 = %s ; S2 = %s ; %s %s %s = %s" % (
+                    s1, s2, a, b, n, size)
+            raise RuntimeError
+        assert(s1 == s2)
+        print s1, s2, a, b, n
+        if b == 1: v = v[0]
+        vv[a] = v
+    return vv
+
+def read_vector(f, d):
+    # Read one Fortran record as a numpy vector of element type `d`; the
+    # leading length marker must divide evenly by the element size and the
+    # trailing marker must match it.
+    fmt = "=I"
+    ss = struct.unpack(fmt, f .read(struct.calcsize(fmt)))[0]
+    ds = struct.calcsize("=%s" % d)
+    if ss % ds != 0:
+        print "fmt = '%s' ; ss = %s ; ds = %s" % (fmt, ss, ds)
+        raise RuntimeError
+    count = ss / ds
+    #print "READING %s (%s %s)" % (count, ss, ds)
+    tr = na.fromfile(f, d, count)
+    vec = struct.unpack(fmt, f.read(struct.calcsize(fmt)))
+    assert(vec[-1] == ss)
+    global examine
+    examine = tr
+    return tr
+
+def skip(f, n = 1):
+    # Skip n Fortran records by reading each leading length marker and
+    # seeking past the payload plus the trailing marker.
+    for i in range(n):
+        fmt = "=I"
+        ss = struct.unpack(fmt, f.read(struct.calcsize(fmt)))[0]
+        #print "SKIPPING %s byte record" % (ss)
+        f.seek(ss + struct.calcsize("=I"), os.SEEK_CUR)
+
+hvals = {}
+hvals.update(read_attrs(f1, header))
+
+noutput, iout, ifout = hvals['nout']
+
+next_set = ( ('tout', noutput, 'd'),
+             ('aout', noutput, 'd'),
+             ('t', 1, 'd'),
+             ('dtold', hvals['nlevelmax'], 'd'),
+             ('dtnew', hvals['nlevelmax'], 'd'),
+             ('nstep',  2, 'i'),
+             ('stat', 3, 'd'),
+             ('cosm', 7, 'd'),
+             ('timing', 5, 'd'),
+             ('mass_sph', 1, 'd') )
+
+hvals.update(read_attrs(f1, next_set))
+
+tree_header = ( ('headl', hvals['nlevelmax'], 'i'),
+                ('taill', hvals['nlevelmax'], 'i'),
+                ('numbl', hvals['nlevelmax'], 'i'),
+              )
+hvals.update(read_attrs(f1, tree_header))
+skip(f1)
+
+for i in range(hvals['nboundary']):
+    skip(f1, 2) # headb, tailb
+    skip(f1) # grid boundary
+
+next_set = ( ('free_mem', 5, 'i'), )
+hvals.update(read_attrs(f1, next_set))
+
+hvals['ordering'] = "".join(read_vector(f1, 'c')).strip()
+skip(f1, 4) 
+
+pointers = {}
+begin = f1.tell()
+f1.seek(0, os.SEEK_END)
+end = f1.tell()
+f1.seek(begin)
+
+print hvals['nx']
+otc = OctreeContainer(hvals['nx'],
+    (0.0, 0.0, 0.0), 
+    (hvals['boxlen'], hvals['boxlen'], hvals['boxlen']))
+
+mi = 1e30
+ma = -1e30
+
+# Pause so a profiler/debugger can attach before the heavy loop.
+loki = raw_input("Connected?")
+
+for ilvl in range(hvals['nlevelmax']):
+    cur = f1.tell()
+    if cur == end: break
+    ng = hvals['numbl'][ilvl]
+    print "HANDLING LEVEL %s (%s), %s from end" % (ilvl, ng, end - cur)
+    vv = pointers[ilvl] = {}
+    ind = na.empty(ng, dtype='int64')
+    ind[:] = read_vector(f1, "I")
+    vv['ind'] = ind 
+    # Skip vv to next / prev
+    skip(f1, 2)
+    # Let's break this down.  Oliver skips 3+3+6+8+8+8
+    # I think this is:
+    #   center (N,3) double
+    #   parent indices (N) int
+    #   neighbor indices (N, 8) int
+    #   child indices (N, 8) int
+    #   cpu_map (N, 8) int
+    pos = na.empty((ng, 3), dtype='float64')
+    pos[:,0] = read_vector(f1, "d") # center_x
+    pos[:,1] = read_vector(f1, "d") # center_y
+    pos[:,2] = read_vector(f1, "d") # center_z
+    vv['pos'] = pos
+    vv['parents'] = read_vector(f1, "I") # parents
+    skip(f1, 6) # neighbors
+    #for i in range(6):
+    #    vv.append(read_vector(f1, "I")) # neighbor
+    children = na.empty((ng, 8), dtype='int64')
+    for i in range(8):
+        children[:,i] = read_vector(f1, "I") # children
+    vv['children'] = children
+    #skip(f1, 8) # cpu map
+    cpu_map = na.empty((ng, 8), dtype='int64')
+    for i in range(8):
+        cpu_map[:,i] = read_vector(f1, "I") # cpu map
+    vv['cpu_map'] = cpu_map
+    rmap = na.empty((ng, 8), dtype='int64')
+    for i in range(8):
+      rmap[:,i] = read_vector(f1, "I") # refinement map
+    vv['rmap'] = rmap
+    mi = min(mi, pos.min())
+    ma = max(ma, pos.max())
+    print "NUMGRIDS", ng
+    # Feed this level's grids into the octree; the added-oct count should
+    # equal ng.
+    aa = otc.nocts
+    otc.add_ramses(1, ilvl, ng, pos, vv['ind'], cpu_map)
+    print otc.nocts - aa, ng
+
+cur = f1.tell()
+print "Ended %s bytes from end" % (end - cur)
+#pprint.pprint(hvals)
+
+# Let's compare our hilbert curves
+print mi, ma
+
+class SphereMock(object):
+    center = [0.5, 0.5, 0.5]
+    radius = 0.1
+
+# Benchmark oct selection on the freshly-built octree (10000 iterations).
+ss = SphereMock()
+sobj = SphereSelector(ss)
+t1 = time.time()
+for i in range(10000):
+    mask = sobj.select_octs(otc)
+t2 = time.time()
+print mask.sum(), mask.size
+print "%0.3e %0.3e" % (t2-t1, (t2-t1)/10000.)

File test_ramses_io.py

+# RAMSES I/O timing: enumerate hydro offsets per domain, then time a full
+# "Density" read over all_data and report the fraction of zero cells.
+from yt.mods import *
+import time
+
+pf = load("output_00045/info_00045.txt")
+pf.h
+t1 = time.time()
+for dom in pf.h.domains:
+    print dom.domain_id, len(dom.hydro_offset)
+t2 = time.time()
+print "%0.3e" % (t2-t1)
+
+dd = pf.h.all_data()
+t1 = time.time()
+rho = dd["Density"]
+t2 = time.time()
+print "%0.3e" % (t2-t1)
+print float((rho == 0).sum())/rho.size