Commits

Matthew Turk committed 943bac3

Latest join script, which will run in parallel AND suck in all the cycle files
it can find.

  • Participants
  • Parent commits d40a325

Comments (0)

Files changed (1)

File join_proj.py

import glob
import os
import time

import h5py
import numpy as na

from yt.utilities.amr_utils import QuadTree, merge_quadtrees
 
 class PFMock(dict):
     domain_left_edge = na.zeros(3, dtype='float64')
 class ProjMock(dict):
     pass
 
# Root-grid resolution of the quadtrees (NN x NN cells at level 0).
NN = 256

# Fields gathered for each projection type.
field_lists = {
    "unweighted": ["Density", "HII_Density", "HeI_Density",
                   "Metal_Density", "Ones"],
    "weighted":   ["Density", "Temperature", "HII_Fraction",
                   "H2I_Fraction", "Metal_Density"],
}
 
-projs = []
-for ax in range(3):
-    qt_w = QuadTree(na.ones(2, "int64") * NN, 5)
-    print "Axis", ax
-    for fn in sorted(glob.glob("cycle%08i/unweighted_*.h5" % cycle)):
-        print fn
-        f = h5py.File(fn)
-        qt_a = QuadTree(na.ones(2, "int64") * NN, 5)
-        qt_a.frombuffer(f["/axis_%02i/refined" % ax][:],
-                        f["/axis_%02i/values" % ax][:],
-                        f["/axis_%02i/weight" % ax][:], "integrate")
-        merge_quadtrees(qt_w, qt_a)
-    # Now we construct our dictionary of values
-    coord_data, field_data, weight_data, dxs = [], [], [], []
-    level = 0
-    while 1:
-        npos, nvals, nwvals = qt_w.get_all_from_level(level)
-        if len(npos) == 0: break
-        coord_data.append(npos)
-        nvals /= nwvals[:,None]
-        field_data.append(nvals)
-        weight_data.append(nwvals)
-        dxs.append(1.0/(NN*2.0**level) * na.ones(nvals.shape[0], dtype="float64"))
-        level += 1
-    coord_data = na.concatenate(coord_data, axis=0).transpose()
-    field_data = na.concatenate(field_data, axis=0).transpose()
-    weight_data = na.concatenate(weight_data, axis=0).transpose()
-    dxs = na.concatenate(dxs, axis=0).transpose()
-    proj = ProjMock()
-    proj.pf = pf
-    proj['px'] = (coord_data[0,:] + 0.5) * dxs
-    proj['py'] = (coord_data[1,:] + 0.5) * dxs
-    proj['pdx'] = dxs / 2.0
-    proj['pdy'] = dxs / 2.0
-    proj['weight_field'] = weight_data
-    for field, arr in zip(fields, field_data):
-        proj[field] = arr
-    projs.append(proj)
-    proj.axis = ax
-    frb = FixedResolutionBuffer(proj, (0.0, 1.0, 0.0, 1.0), (2048, 2048))
-    write_image(na.log10(frb["Density"]), "hi_%02i.png" % ax)
+fspec = r"/scratch/bwoshea/testrun_nest_inline/cycle%08i/%s_*.h5"
+cycles = [int(os.path.basename(f)[-8:]) for f in 
+          glob.glob("/scratch/bwoshea/testrun_nest_inline/cycle*")]
+cycles.sort()
+
+for cycle in parallel_objects(cycles, -1):
+    for t in ["weighted", "unweighted"]:
+        fields = field_lists[t]
+        projs = []
+        output = h5py.File("j_%08i_%s.h5" % (cycle, t), "w")
+
+        for ax in range(3):
+            qt_w = QuadTree(na.ones(2, "int64") * NN, 5)
+            print "Axis", ax
+            t1 = time.time()
+            fns = glob.glob(fspec % (cycle, t))
+            pb = get_pbar("Cycle %08i Axis %s (%s) " % (cycle, ax, t), len(fns))
+            for i, fn in enumerate(sorted(fns)):
+                pb.update(i)
+                f = h5py.File(fn, "r")
+                qt_a = QuadTree(na.ones(2, "int64") * NN, 5)
+                qt_a.frombuffer(f["/axis_%02i/refined" % ax][:],
+                                f["/axis_%02i/values" % ax][:],
+                                f["/axis_%02i/weight" % ax][:], "integrate")
+                merge_quadtrees(qt_w, qt_a)
+                attrs = f["/attributes"].attrs.items()
+                f.close()
+            pb.finish()
+            # Now we construct our dictionary of values
+            coord_data, field_data, weight_data, dxs = [], [], [], []
+            level = 0
+            while 1:
+                npos, nvals, nwvals = qt_w.get_all_from_level(level)
+                if len(npos) == 0: break
+                coord_data.append(npos)
+                nvals /= nwvals[:,None]
+                field_data.append(nvals)
+                weight_data.append(nwvals)
+                dxs.append(1.0/(NN*2.0**level) * na.ones(nvals.shape[0], dtype="float64"))
+                level += 1
+            coord_data = na.concatenate(coord_data, axis=0).transpose()
+            field_data = na.concatenate(field_data, axis=0).transpose()
+            weight_data = na.concatenate(weight_data, axis=0).transpose()
+            dxs = na.concatenate(dxs, axis=0).transpose()
+            proj = ProjMock()
+            proj.pf = pf
+            proj['px'] = (coord_data[0,:] + 0.5) * dxs
+            proj['py'] = (coord_data[1,:] + 0.5) * dxs
+            proj['pdx'] = dxs / 2.0
+            proj['pdy'] = dxs / 2.0
+            proj['weight_field'] = weight_data
+            for field, arr in zip(fields, field_data):
+                proj[field] = arr
+            projs.append(proj)
+            proj.axis = ax
+            output.create_group("/axis_%02i" % ax)
+            for v, ds in proj.items():
+                output.create_dataset("/axis_%02i/%s" % (ax, v), data=ds)
+            print "Took %0.3e, %s" % (time.time() - t1, proj.keys())
+            #frb = FixedResolutionBuffer(proj, (0.0, 1.0, 0.0, 1.0), (4096, 4096))
+            #write_image(na.log10(frb["Density"]), "hi_%02i.png" % ax)
+
+        attr = output.create_group("/attributes")
+        for k, v in attrs:
+            attr.attrs[k] = v
+
+        output.close()