Commits

Bendik R. Opstad committed 0c88789

Separated scripts for creating plots for master

  • Parent commits 2a91bc9

Files changed (16)

+*.ps
+*.pdf
+*.pyc
+*.txt

File bendik_graph_base.py

 import graph_default
 graph_default.get_conf = get_default_conf
 
+from graph_default_bendik import get_latency_conf
+
+import latency
+latency.get_conf = get_latency_conf
+
 if __name__ == "__main__":
     parse_args()
     main()
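
The hunk above swaps in a custom latency configuration by rebinding the module attribute before main() runs, the same pattern already used for graph_default.get_conf. A minimal standalone sketch of that override pattern (the module here is a hypothetical stand-in built with types.ModuleType):

    import types

    # Hypothetical stand-in for a module like latency that exposes get_conf().
    latency = types.ModuleType("latency")
    latency.get_conf = lambda: {"plot_conf": {"n_rows": 1}}

    _original_get_conf = latency.get_conf

    def get_latency_conf():
        conf = _original_get_conf()      # start from the module's defaults
        conf["plot_conf"]["n_rows"] = 7  # site-specific override
        return conf

    # Rebind the attribute; later calls through the module see the override.
    latency.get_conf = get_latency_conf
    print latency.get_conf()["plot_conf"]["n_rows"]  # -> 7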

File burstiness.py

     results_dict["results_file"] = results_dict["stdout_file"]
 
     results_dict["loss_all_results_file"] = os.path.join(file_conf["output_dir"], "%s%s" % (file_conf["prefix"], "loss-aggr.dat"))
-    results_dict["senttimes_all_results_file"] = os.path.join(file_conf["output_dir"], "%s%s" % (file_conf["prefix"], "packet-count-all.dat"))
+    results_dict["senttimes_all_results_file"] = os.path.join(file_conf["output_dir"], "%s%s" % (file_conf["prefix"], "throughput-aggr.dat"))
 
     files = []
     #t37-rdb-itt100:15-ps120-cccubic_vs_g10_kbit5000_min10_rtt150_loss_pif20_qlen30_delayfixed_num0_rdbsender-rdb-senttime-all-10.0.0.12-22000-10.0.0.22-5000.dat
 
     results_dict["results_cmd"] =\
         "analyseTCP -s %(src_ip)s -r %(dst_ip)s -p %(dst_port)s -f %(pcap_file)s -g %(pcap_file_receiver)s -u%(prefix)s "\
-        " -o %(output_dir)s -G%(slice_time)s -L%(slice_time)s -a 1> %(stdout_file)s" % dict(file_conf, **results_dict)
+        " -o %(output_dir)s -T%(slice_time)s -L%(slice_time)s -a 1> %(stdout_file)s" % dict(file_conf, **results_dict)
 
     file_conf["color"] = "darkred" # Default color for dataset
     file_conf["results"] = {"senttime": results_dict}
     if "percentiles" in plot_conf["box_conf"]:
         file_conf["percentiles"] = plot_conf["box_conf"]["percentiles"]
 
-def senttime_results_parse(conf, box_conf, file_data, set_key):
-    if graph.args.verbose > 2:
-        print "Reading senttime file:", file_data["results"]["senttime"]["senttimes_all_results_file"]
-    dataframe = r["read.csv"](file_data["results"]["senttime"]["senttimes_all_results_file"], header=False, **{"comment.char": "#"})#, colClasses=StrVector(("integer", "integer")))
-
-    dataframe.names[0] = "X"
-    dataframe.names[1] = "Y"
-    # Add column with color name
-    dataframe = r.cbind(dataframe, color_column=file_data["color"])
-
-    #data = []
-    #for f in file_data["results"]["senttime"]["senttime_by_stream_files"]:
-    #    print "Reading senttime file:", f
-    #    dataframe = r["read.table"](f)
-    #    data.append((f, r["as.numeric"](dataframe[0])))
-    #file_data["ecdf_values"] = data
-    file_data["data"] = dataframe
-
-
-def loss_bytes_relative_to_total_results_parse(conf, box_conf, file_data, set_key):
-    if graph.args.verbose > 2:
-        print "Reading loss_bytes_relative_to_total file:", file_data["results"]["senttime"]["loss_all_results_file"]
-    dataframe = r["read.csv"](file_data["results"]["senttime"]["loss_all_results_file"]) #, colClasses=StrVector(("integer", "integer")))
-    dataframe.names[0] = "X"
-    dataframe.names[8] = "Y"
-    # Add column with color name
-    dataframe = r.cbind(dataframe, color_column=file_data["color"])
-    dataframe[7] = dataframe[7].ro * 100
-    dataframe[8] = dataframe[8].ro * 100
-    file_data["data"] = dataframe
-
-    file_data["ecdf_values"] = [r["as.numeric"](dataframe[8])]
-    #print 'DATA1:', file_data["data"]
-    #print 'DATA1:', dataframe_data["data"]
-
-def loss_bytes_results_parse(conf, box_conf, file_data, set_key):
-    if graph.args.verbose > 2:
-        print "Reading loss_bytes_results file:", file_data["results"]["senttime"]["loss_all_results_file"]
-    dataframe = r["read.csv"](file_data["results"]["senttime"]["loss_all_results_file"]) #, colClasses=StrVector(("integer", "integer")))
-    dataframe.names[0] = "X"
-    dataframe.names[4] = "Y"
-    # Add column with color name
-    dataframe = r.cbind(dataframe, color_column=file_data["color"])
-    file_data["data"] = dataframe
-
-    file_data["ecdf_values"] = [r["as.numeric"](dataframe[4])]
-
-def loss_bytes_relative_to_interval_results_parse(conf, box_conf, file_data, set_key):
-    if graph.args.verbose > 2:
-        print "Reading loss_bytes_relative_to_interval file:", file_data["results"]["senttime"]["loss_all_results_file"]
-    dataframe = r["read.csv"](file_data["results"]["senttime"]["loss_all_results_file"]) #, colClasses=StrVector(("integer", "integer")))
-    dataframe.names[0] = "X"
-    dataframe.names[6] = "Y"
-    # Add column with color name
-    dataframe = r.cbind(dataframe, color_column=file_data["color"])
-    dataframe[6] = dataframe[6].ro * 100
-    file_data["data"] = dataframe
-
-    file_data["ecdf_values"] = [r["as.numeric"](dataframe[6])]
-
-    #print "DATA:", dataframe[6]
-
-    #print "COLUMNS:", dataframe.names
-    rows_i = robjects.IntVector(range(100))
-    subdataf = dataframe.rx(rows_i, True)
-    #print "subdataf:\n", subdataf
-    #print "SUM:", r["summary"](DataFrame({'loss': dataframe[6]}))
-    result = util.parse_r_summary(r["summary"](DataFrame({'loss': dataframe[6]})))
-    loss_stats = "{Min:03.0f} / {1st Qu:03.0f} / {Median:03.0f} / {3rd Qu:03.0f} / {Max:05.0f}".format(**result)
-    #print "loss_stats:", loss_stats
 
 
 def senttime_box_key_func(box_conf, conf):
     else:
         return "darkgreen"
 
-def set_bursty_box_conf(plot_conf, box, arg):
+#data = []
+#for f in file_data["results"]["senttime"]["senttime_by_stream_files"]:
+#    print "Reading senttime file:", f
+#    dataframe = r["read.table"](f)
+#    data.append((f, r["as.numeric"](dataframe[0])))
+#file_data["ecdf_values"] = data
 
-    if arg == "1bursty":
-        box["plot_func"] = plot_scatterplot_senttime
-        box["parse_results_func"] = senttime_results_parse
-        box["y_axis_title"] = "Packet count"
-    elif arg == "2loss_bytes_absolute":
-        box["plot_func"] = plot_scatterplot_senttime
-        box["parse_results_func"] = loss_bytes_results_parse
-        box["y_axis_title"] = "Lost bytes"
-    elif arg == "3loss":
-        box["plot_func"] = plot_scatterplot_senttime
-        box["parse_results_func"] = loss_bytes_relative_to_total_results_parse
-        box["y_axis_title"] = "Lost bytes relative to total bytes sent"
-    elif arg == "4loss_bytes":
-        box["plot_func"] = plot_scatterplot_senttime
-        box["parse_results_func"] = loss_bytes_relative_to_interval_results_parse
-        box["y_axis_title"] = "Lost bytes relative to sent in interval"
+def loss_results_parse_generic(conf, box_conf, file_data, set_key, column_name, filename, percent=False, cols=[]):
+    if graph.args.verbose > 2:
+        print "Reading loss data file:", filename
+    dataframe = r["read.csv"](filename)
+
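+    # Note: this assumes column_name occurs in the CSV header; if it does not,
+    # idx silently ends up pointing at the last column.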
+    for idx, name in enumerate(dataframe.names):
+        if name == column_name:
+            break
+
+    for col in cols:
+        for sidx, name in enumerate(dataframe.names):
+            if name == col:
+                break
+        #print "IDX :", dataframe.names[idx]
+        #print "SIDX:", dataframe.names[sidx]
+        #print "idx: %d, sidx: %d" % (idx, sidx)
+        robjects.globalenv["dataframe_stuff"] = dataframe
+        # Subtract the values in each row of col sidx from each row of col idx
+        r("column_stuff <- dataframe_stuff$%s - dataframe_stuff$%s" % (dataframe.names[idx], dataframe.names[sidx]))
+        dataframe[idx] = r["column_stuff"]
 
-def set_bursty_box_conf_CDF(plot_conf, box, arg):
+    dataframe.names[0] = "X"
+    dataframe.names[idx] = "Y"
+
+    # Add column with color name
+    dataframe = r.cbind(dataframe, color_column=file_data["color"])
+    if percent:
+        dataframe[idx] = dataframe[idx].ro * 100
+    file_data["data"] = dataframe
+    file_data["ecdf_values"] = [r["as.numeric"](dataframe[idx])]
+    file_data["ecdf_ggplot_values"] = dataframe
+
+    #rows_i = robjects.IntVector(range(100))
+    #subdataf = dataframe.rx(rows_i, True)
+    #result = util.parse_r_summary(r["summary"](dataframe[idx]))
+    #loss_stats = "{Min:03.0f} / {1st Qu:03.0f} / {Median:03.0f} / {3rd Qu:03.0f} / {Max:05.0f}".format(**result)
+    print "result:\n", r["summary"](dataframe[idx])
+
+def set_bursty_box_conf(plot_conf, box, arg):
+    box["plot_func"] = plot_scatterplot_senttime
+    print "set_bursty_box_conf:", arg
 
     if arg == "1bursty":
-        box["plot_func"] = plot_ecdf_box
-        box["parse_results_func"] = senttime_results_parse
+        box["parse_results_func"] = lambda conf, box_conf, file_data, set_key: \
+            loss_results_parse_generic(
+            conf, box_conf, file_data, set_key, "packet_count", file_data["results"]["senttime"]["senttimes_all_results_file"]
+            )
         box["y_axis_title"] = "Packet count"
     elif arg == "2loss_bytes_absolute":
-        box["plot_func"] = plot_ecdf_box
-        box["parse_results_func"] = loss_bytes_results_parse
+        box["parse_results_func"] = lambda conf, box_conf, file_data, set_key: \
+            loss_results_parse_generic(
+            conf, box_conf, file_data, set_key, "all_bytes_lost", file_data["results"]["senttime"]["loss_all_results_file"]
+            )
         box["y_axis_title"] = "Lost bytes"
     elif arg == "3loss":
-        box["plot_func"] = plot_ecdf_box
-        box["parse_results_func"] = loss_bytes_relative_to_total_results_parse
+        box["parse_results_func"] = lambda conf, box_conf, file_data, set_key: \
+            loss_results_parse_generic(
+            conf, box_conf, file_data, set_key, "all_bytes_lost_relative_to_total", file_data["results"]["senttime"]["loss_all_results_file"], True
+            )
         box["y_axis_title"] = "Lost bytes relative to total bytes sent"
-        box["x_axis_lim"] = [0.0, 0.06]
     elif arg == "4loss_bytes":
-        box["plot_func"] = plot_ecdf_box
-        box["parse_results_func"] = loss_bytes_relative_to_interval_results_parse
+        box["parse_results_func"] = lambda conf, box_conf, file_data, set_key: \
+            loss_results_parse_generic(
+            conf, box_conf, file_data, set_key, "all_bytes_lost_relative_to_interval", file_data["results"]["senttime"]["loss_all_results_file"], True
+            )
         box["y_axis_title"] = "Lost bytes relative to sent in interval"
-        box["x_axis_lim"] = [0, 100]
+    elif arg == "5new_bytes_lost_relative_to_all_bytes_lost":
+        box["parse_results_func"] = lambda conf, box_conf, file_data, set_key: \
+            loss_results_parse_generic(
+            conf, box_conf, file_data, set_key, "new_bytes_lost_relative_to_all_bytes_lost", file_data["results"]["senttime"]["loss_all_results_file"], True
+            )
+        box["y_axis_title"] = "New bytes lost relative to all lost in interval"
+    elif arg == "6old_bytes_lost_relative_to_all_bytes_lost":
+        box["parse_results_func"] = lambda conf, box_conf, file_data, set_key: \
+            loss_results_parse_generic(
+            conf, box_conf, file_data, set_key, "old_bytes_lost_relative_to_all_bytes_lost", file_data["results"]["senttime"]["loss_all_results_file"], True
+            )
+        box["y_axis_title"] = "Old bytes lost relative to all lost in interval"
 
+
+def set_bursty_box_conf_CDF(plot_conf, box, arg):
+    set_bursty_box_conf(plot_conf, box, arg)
+    box["plot_func"] = ggplot_cdf_box
     box["percentiles"] = [.9]
 
 def set_box_key_value(file_conf, arg):
     conf["set_conf"] = { "key": "Streams:%(stream_count_thin1)s_Payload:%(payload_thin1)s_ITT:%(itt_thin1)s_RTT:%(rtt)s_PIFG:%(group)s" }
     conf["page_group_def"] = {"title": "Payload: %(payload_thin1)s, RTT: %(rtt)s Payload: %(payload_thin1)s  QLEN: %(queue_len)s Duration: %(duration)s min",
                               "sort_keys": ["payload_thin1", "rtt"], "sort_key_func": lambda x: (x[0], x[1])}
-    conf["plot_conf"].update({"n_columns": 1, "n_rows" : 2, "x_axis_lim" : [0, 600],
-                              "y_axis_lim" : [0, 1],
+    conf["plot_conf"].update({"n_columns": 1, "n_rows" : 7, "x_axis_lim" : [0, 600],
                               #"plot_func": plot_scatterplot_senttime,
-                              "plot_func": plot_ecdf_box,
-                              #"do_plots_func": do_ggplots,
+                              #"plot_func": plot_ecdf_box,
+                              "plot_func": ggplot_cdf_box,
+                              "do_plots_func": do_ggplots,
                               "legend": { "color_func": get_color_senttime, "values": {"type_thick": "Greedy", "type_thin1": "Thin"}},
                               "box_commands": thin_itt_mark_line_func,
-                              #"set_box_conf": set_bursty_box_conf,
-                              "set_box_conf": set_bursty_box_conf_CDF,
+                              "set_box_conf": set_bursty_box_conf,
                               }
                              )
     conf["file_parse"].update({ "func": senttime_file_parse_func,
-                                #"parse_func_arg": ["1bursty", "2loss_bytes_absolute", "3loss", "4loss_bytes"] })
-                                "parse_func_arg": ["3loss", "4loss_bytes"] })
+                                "parse_func_arg": ["1bursty", "2loss_bytes_absolute", "3loss", "4loss_bytes", "5new_bytes_lost_relative_to_all_bytes_lost"] })
+                                #"parse_func_arg": ["3loss", "4loss_bytes"] })
+                                #"parse_func_arg": ["3loss"] })
                                 #"parse_func_arg": ["1bursty"] })
     conf["document_info"]["title"] = "Senttime Plots"
     conf["document_info"]["show"] = False
-    #conf["paper"] = "special"
-    #conf["paper_width"] = 100
-    #conf["paper_height"] = 12
+    conf["paper"] = "special"
+    conf["paper_width"] = 100
+    conf["paper_height"] = 30
 
     conf["file_parse"]["files_nomatch_regex"].append(".*qlen46875.*")
-    #conf["plot_conf"].update({ "n_columns": 1, "n_rows" : 1})
+
+    ### CDF settings
+    #conf["paper_width"] = 20
+    #conf["paper_height"] = 15
+    #conf["paper_width"] = 14
+    #conf["paper_height"] = 6
+    #conf["plot_conf"].update({ "n_columns": 2, "n_rows" : 1,
+    #                           "set_box_conf": set_bursty_box_conf_CDF,
+    #                           #"y_axis_lim" : [0, 1],
+    #                           })
+    #conf["file_parse"].update({"parse_func_arg": ["5new_bytes_lost_relative_to_all_bytes_lost", "6old_bytes_lost_relative_to_all_bytes_lost"
+    #                                              #"3loss", "4loss_bytes"
+    #                                              ]})
     return conf
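
The main change in burstiness.py collapses the four near-identical *_results_parse functions into loss_results_parse_generic, parameterized by the CSV column to plot; each arg branch then binds its column (and optional percent scaling) in a small lambda. A pure-Python sketch of the same consolidation, with illustrative data standing in for the rpy2/read.csv handling (the column names mirror those used above):

    import csv, StringIO

    SAMPLE = ("interval,all_bytes_lost,all_bytes_lost_relative_to_total\n"
              "0,120,0.5\n"
              "1,80,0.25\n")

    def loss_parse_generic(csv_text, column_name, percent=False):
        # One parser for every loss metric: pick the column by name.
        rows = list(csv.DictReader(StringIO.StringIO(csv_text)))
        values = [float(row[column_name]) for row in rows]
        if percent:
            values = [v * 100 for v in values]
        return values

    # Each plot variant just binds the column it needs, as the lambdas above do.
    parse_absolute = lambda text: loss_parse_generic(text, "all_bytes_lost")
    parse_relative = lambda text: loss_parse_generic(
        text, "all_bytes_lost_relative_to_total", percent=True)

    print parse_absolute(SAMPLE)   # [120.0, 80.0]
    print parse_relative(SAMPLE)   # [50.0, 25.0]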

File conn_info.py

         self.key_regex = False
         self.__dict__.update(kw)
 
-    #def __get__(self, key):
-    #    if key in self.args:
-    #        return self.args[key]
-    #    else:
-    #        return None
-
 def conn_info_get_aggregated_data(conf, filename):
-    #print "probe file:", file_data["tcp_probe_data"]
-
     keys = [DataMatch("Number of retransmissions", name="Number of retrans"),
             DataMatch("Occurrences of 1. retransmission",            name="1. retrans",        value_regex=None),
             DataMatch("Occurrences of 2. retransmission",            name="2. retrans",        value_regex=None),
     with open(file_data["results"]["conninfo"]["results_file"], 'r') as data_file:
         content = data_file.read()
 
-    #print "results_file:", file_data["results"]["conninfo"]["results_file"]
-
-    #"Average                                    0                   9816          14.5 %        14.1 %"
-    regex = "Average\s+0\s+(?P<packets_sent>\d+)\s+(?P<bytes_loss>\S+)\s\%\s+(?P<ranges_loss>\S+)\s\%"
-    m = re.search(regex, content, flags=re.DOTALL)
-    if not m:
-        cprint("Failed to match regex: %s" % regex, "red")
-        return
-
-    # _collapse_thin_0_collapse_thick_0_segoff_off_segoff_thin_off_segoff_thick_off
-    #regex = "(?:_(?:collapse_thin|clps_thick)_(?P<collapse_thin>\d))?"\
-    #    "(?:_(?:collapse_thick|clps_thick)_(?P<collapse_thick>\d))?"\
-    #    "(?:_segoff_(?P<segoff>[a-z]+))?"\
-    #    "(?:_segoff_thin_(?P<segoff_thin>[a-z]+))?"\
-    #    "(?:_segoff_thick_(?P<segoff_thick>[a-z]+))?"
-    #    #".+"
     regexes = []
     regex = "(?:_clps-t-(?P<collapse_thin>\d))"\
         "(?:_clps-g-(?P<collapse_thick>\d))"\
             if m_extra:
                 break
 
-    regex = "(?P<src_ip>\S+)-(?P<src_port>\S+)-(?P<dst_ip>\S+)-(?P<dst_port>\S+)\s+(?P<duration>\d+)\s+(?P<packets_sent>\d+)\s+(?P<bytes_loss>\S+)\s\%\s+(?P<ranges_loss>\S+)\s\%"
+             #10.0.0.13-12000-10.0.0.22-5001:            298             9.07 / 9.07  4278         3891            9.05 %          9.06 %        9.05 %
+             #10.0.0.15-32000-10.0.0.22-5003                                             900                9010          8979            0.34 %        0.34 %        0.34 %
+    regex = "(?P<src_ip>\S+)-(?P<src_port>\S+)-(?P<dst_ip>\S+)-(?P<dst_port>\S+):\s+"\
+            "(?P<duration>\d+)\s+(?P<est_loss>\S+\s/\s\S+)\s+(?P<packets_sent>\d+)\s+(?P<packets_received>\d+)\s+"\
+            "(?P<packet_loss>\S+)\s\%\s+(?P<bytes_loss>\S+)\s\%\s+(?P<ranges_loss>\S+)\s\%"
     m3 = re.finditer(regex, content, flags=re.DOTALL)
     bytes_loss = []
+    packet_loss = []
     if not m3:
         print "Failed to match regex!"
         return
 
     for l in m3:
         bytes_loss.append(float(l.group("bytes_loss")))
+        packet_loss.append(float(l.group("packet_loss")))
 
     # Handle R loss and latency results
-    dataf = DataFrame({'bytes_loss': FloatVector(bytes_loss)})
-    result = util.parse_r_summary(r["summary"](dataf))
-    loss_stats = "{Min:03.1f} / {1st Qu:03.1f} / {Median:03.1f} / {3rd Qu:03.1f} / {Max:03.1f}".format(**result)
+    bytes_loss_dataf = DataFrame({'bytes_loss': FloatVector(bytes_loss)})
+    bytes_loss_summary = util.parse_r_summary(r["summary"](bytes_loss_dataf))
+
+    packet_loss_dataf = DataFrame({'packet_loss': FloatVector(packet_loss)})
+    packet_loss_summary = util.parse_r_summary(r["summary"](packet_loss_dataf))
+
+    loss_stats = "{Min:03.1f} / {1st Qu:03.1f} / {Median:03.1f} / {3rd Qu:03.1f} / {Max:03.1f}".format(**bytes_loss_summary)
 
     print "Latency file:", file_data["results"]["latency"]["results_file"]
     # Latency
-    dataframe = r["read.csv"](file_data["results"]["latency"]["results_file"], header=False) #, colClasses=StrVector(("integer", "integer")))
-    result = util.parse_r_summary(r["summary"](DataFrame({'ack_latency': dataframe[1]})))
-    latency_stats = "{Min:03.0f} / {1st Qu:03.0f} / {Median:03.0f} / {3rd Qu:03.0f} / {Max:05.0f}".format(**result)
+    latency_dataf = r["read.csv"](file_data["results"]["latency"]["results_file"], header=False) #, colClasses=StrVector(("integer", "integer")))
+    ack_latency_summary = util.parse_r_summary(r["summary"](DataFrame({'ack_latency': latency_dataf[1]})))
+    latency_stats = "{Min:03.0f} / {1st Qu:03.0f} / {Median:03.0f} / {3rd Qu:03.0f} / {Max:05.0f}".format(**ack_latency_summary)
+    print "latency_stats:", latency_stats
 
     print "LATENCY - stdout_file:", file_data["results"]["latency"]["stdout_file"]
     data_aggr_data = conn_info_get_aggregated_data(conf, file_data["results"]["latency"]["stdout_file"])
     if file_data.get("stream_count_thin2", None):
         streams += "vs%s" % (file_data["stream_count_thin2"])
 
-    data = dict(**file_data)
-
-
-    #print "\n\n\ndata_aggr_data:", data_aggr_data
-
-    #print "data:", data.keys()
 
+    data = dict(**file_data)
     data["rdb_packet_count"] = data_aggr_data["RDB packets"]
     data["data_packet_count"] = data_aggr_data["Data packets sent"]
 
+    print "S:", file_data["streams"][file_data["streams_id"]]
+
+    #data["payload"] = file_data["streams"][file_data["streams_id"]].get("payload", "")
+    data.update(file_data["streams"][file_data["streams_id"]])
+
     #print "data_aggr_data:", data_aggr_data.keys()
     ps_percentiles = "%4s/%4s/%4s/%4s/%4s/%4s/%4s/%4s/%4s" % (data_aggr_data["ps_1_percentile"], data_aggr_data["ps_5_percentile"], data_aggr_data["ps_10_percentile"],
                                                               data_aggr_data["ps_25_percentile"], data_aggr_data["ps_50_percentile"], data_aggr_data["ps_75_percentile"],
 
     #print "%15s - type_thin1: %s, type_id: %s, type: %s" % (data["hostname"], data["type_thin1"], data["type_id"], data["type"])
 
-    data.update({ "bytes_loss" : float(m.group("bytes_loss")),
-                  "ranges_loss": m.group("ranges_loss"),
-                  "R_loss_stats": loss_stats,
-                  "R_latency_stats": latency_stats,
-                  "streams" : streams,
-                  "packet_size": ps_percentiles,
-                  "latency": latency_percentiles,
-                  })
+    data.update({"bytes_loss" : float("%.2f" % bytes_loss_summary["Mean"]),
+                 "packet_loss": float("%.2f" % packet_loss_summary["Mean"]),
+                 "R_loss_stats": loss_stats,
+                 "R_latency_stats": latency_stats,
+                 "streams" : streams,
+                 "packet_size": ps_percentiles,
+                 "latency": latency_percentiles,
+             })
 
     #"rdb_packet_count"
 
     if m_extra:
-        data.update({ "collapse_thin" : m_extra.group("collapse_thin"),
-                      "collapse_thick" : m_extra.group("collapse_thick"),
-                      "segoff" : m_extra.group("segoff"),
-                      "segoff_thin" : m_extra.group("segoff_thin"),
-                      "segoff_thick": m_extra.group("segoff_thick"),
-                      })
+        data.update({"collapse_thin" : m_extra.group("collapse_thin"),
+                     "collapse_thick" : m_extra.group("collapse_thick"),
+                     "segoff" : m_extra.group("segoff"),
+                     "segoff_thin" : m_extra.group("segoff_thin"),
+                     "segoff_thick": m_extra.group("segoff_thick"),
+                     })
 
         if "gso_thin" in m_extra.groupdict():
             data["gso_thin"] = m_extra.group("gso_thin") if m_extra.group("gso_thin") else "-"
             data["gro_thin"] = m_extra.group("gro_thin") if m_extra.group("gro_thin") else "-"
             data["lro_thin"] = m_extra.group("lro_thin") if m_extra.group("lro_thin") else "-"
 
+    with open(file_data["results"]["loss_per_stream"]["stdout_file"], 'r') as data_file:
+        content = data_file.read()
+        content = content.split("Connections in sender dump")[1]
+        content = "%s: %s" % (file_data["hostname"], content)
+        conf["results"]["loss_per_stream"]["data"].append(content)
+
+    #print "data:", data.keys()
+
     key = "%s:%s-%s:%s" % (l.group("src_ip"), l.group("src_port"), l.group("dst_ip"), l.group("dst_port"))
     conf["results"]["conninfo"]["loss_stats"].append(data)
 
         return False
     #print "pcap_file:", pcap_file
     file_conf["prefix"] = "%s-" % file_conf["data_file_name"]
-    file_conf["results"] = {"conninfo": {}, "latency": {}}
+    file_conf["results"] = {"conninfo": {}, "latency": {}, "loss_per_stream": {}}
 
     file_conf["results"]["conninfo"]["stdout_file"] = os.path.join(file_conf["output_dir"], "%s%s" % (file_conf["prefix"], "command.stout"))
     file_conf["results"]["conninfo"]["results_file"] = file_conf["results"]["conninfo"]["stdout_file"]
         "analyseTCP -s %(src_ip)s -r %(dst_ip)s -p %(dst_port)s -f %(pcap_file)s -g %(pcap_file_receiver)s -o %(output_dir)s -A -u%(prefix)s -l -i1,5,10,25,50,75,90,95,99 1> %(stdout_file)s" %\
         dict(file_conf, **file_conf["results"]["latency"])
 
+    results_file = os.path.join(file_conf["output_dir"], "%s%s" % (file_conf["common_prefix"], "loss-all-streams.dat"))
+    plot_conf["results"]["loss_per_stream"]["results_file"] = results_file
+
+    file_conf["results"]["loss_per_stream"]["results_file"] = results_file
+    file_conf["results"]["loss_per_stream"]["stdout_file"] = os.path.join(file_conf["output_dir"], "%s%s" % (file_conf["prefix"], "loss-all-streams.stout"))
+    file_conf["results"]["loss_per_stream"]["results_cmd"] = \
+        "analyseTCP -s %(src_ip)s -r %(dst_ip)s -p %(dst_port)s -f %(pcap_file)s -g %(pcap_file_receiver)s -e -vvv 1> %(stdout_file)s" %\
+        dict(file_conf, **file_conf["results"]["loss_per_stream"])
+
     file_conf["color"] = "darkred" # Default color for dataset
 
     file_conf["group"] = file_conf["type_id"]
     if file_conf["thin_dupack"] is None:
         file_conf["thin_dupack"] = 0
 
-    #file_conf["itt"] = None
-
-    #file_conf["type"] = parse_func_arg
-
-    if file_conf["type"] == "thin1":
-        #file_conf["dst_port"] = "%d-%d" % (thin1_start, thin1_start + file_conf["stream_count_thin1"])
-        file_conf["itt"] = file_conf["itt_thin1"]
-    elif file_conf["type"] == "thin2":
-        #file_conf["dst_port"] = "%d-%d" % (thin2_start, thin2_start + file_conf["stream_count_thin2"])
-        file_conf["itt"] = file_conf["itt_thin2"]
-
-
 def connection_info_process_results(conf):
 
     with open(conf["output_file"], 'a') as out:
             for sort_key in conf["process_results"]["sort_keys"]:
                 sorted_data = sorted(sorted_data, key=lambda k: k[sort_key])
 
+        #print "sorted_data:", sorted_data
         for i, e in enumerate(sorted_data):
             headers = OrderedDict().fromkeys(conf["process_results"]["headers"], conf["process_results"]["default_column_width"])
 
                     ansi_escape = re.compile(r'\x1b[^m]*m')
                     out.write((conf["process_results"]["separate"]["char"] * len(ansi_escape.sub('', line).rstrip())) + "\n")
 
+    print 'results_file:', conf["results"]["loss_per_stream"]["results_file"]
+    # Process the -e output
+    with open(conf["results"]["loss_per_stream"]["results_file"], 'w') as out:
+        data = conf["results"]["loss_per_stream"]["data"]
+        for d in data:
+            out.write(d)
+
+
 def _get_default_conf():
     conf = deepcopy(graph_default.get_conf())
     conf["box_conf"] = { "key": "Streams:%(stream_count_thin1)s_Payload:%(payload_thin1)s_ITT:%(itt_thin1)s_RTT:%(rtt)s_PIFG:%(group)s",
     conf["file_parse"]["func"] = connection_info_file_parse_func_time
     conf["plot_conf"]["make_plots"] = False
     conf["results"]["conninfo"] = { "loss_stats": []}
+    conf["results"]["loss_per_stream"] = { "data": []}
+
     #conf["file_parse"]["files_match_exp"] = "*bridge2-eth2*%s" % conf["file_parse"]["pcap_suffix"]
 
     conf["process_results"] = { "func": connection_info_process_results, "column_widths" : {},
-                                                "separate": { "func": lambda line_index: line_index % 4 == 3, "char": "-" }
-                                                }
+                                "separate": { "func": lambda line_index: line_index % 4 == 3, "char": "-" }
+    }
     #conf["process_results"]["sort_keys"] = ["collapse_thin", "segoff_thin", "segoff_thick", "hostname", "streams", "duration"] # "bytes_loss", "segoff",
     conf["process_results"]["sort_keys"] = ["duration", "type", "hostname", "num", "queue_len", "thin_dupack",
                                             # "collapse_thin", "segoff_thin", "segoff_thick",
                                             "packets_in_flight", "stream_count_thin1", "stream_count_thick", "duration", "itt_thin1"] # "bytes_loss", "segoff",
-    conf["process_results"]["headers"] = ["Host", "Str", "RDB", "PIF", "QLEN", "DA", "RTT", "ITT", "PS", "Dur", "D-PKT", "B-PKT",
+    conf["process_results"]["headers"] = ["Host", "Str", "RDB", "PIF", "QLEN", "RC", "RTT", "ITT", "PS", "Dur", "D-PKT", "B-PKT",
                                                           #"clps-t",
                                                           #"colps-thick",
                                                           #"soff", "soff_t", "soff_g",
                                                           #"gso_t", "tso_t", "lro_t", "gro_t",
                                                           #"segoff_thick", "bridge_rate",
-                                                          "BLoss", "RLoss", "Test", "R_loss_stats", "R_latency_stats", "latency"]
-
-    conf["process_results"]["value_keys"] = OrderedDict().fromkeys(["hostname", "streams", "type", "packets_in_flight", "queue_len", "thin_dupack", "rtt", "itt", "payload",
-                                                                                    "duration", "data_packet_count", "rdb_packet_count",
-                                                                                    #"collapse_thin", "segoff", "segoff_thin", "segoff_thick",
-                                                                                    #"gso_thin", "tso_thin", "lro_thin", "gro_thin",
-                                                                                    #"segoff_thick", "bridge_rate",
-                                                                                    "bytes_loss", "ranges_loss", "num", "R_loss_stats", "R_latency_stats", "latency"], None)
+                                                          "BLoss", "PLoss", "Test", "R_loss_stats", "R_latency_stats", "latency"]
+
+    conf["process_results"]["value_keys"] = OrderedDict().fromkeys(["hostname", "streams", "type", "packets_in_flight", "queue_len", "retrans_collapse", "rtt", "itt", "payload",
+                                                                    "duration", "data_packet_count", "rdb_packet_count",
+                                                                    #"collapse_thin", "segoff", "segoff_thin", "segoff_thick",
+                                                                    #"gso_thin", "tso_thin", "lro_thin", "gro_thin",
+                                                                    #"segoff_thick", "bridge_rate",
+                                                                    "bytes_loss", "packet_loss", "num", "R_loss_stats", "R_latency_stats", "latency"], None)
     conf["process_results"]["default_column_width"] = 12
-    conf["process_results"]["column_widths"].update({"Host": 11, "Str": 7, "RDB": 3, "PIF": 3, "QLEN": 6, "DA": 2, "RTT": 4, "D-PKT": 6, "B-PKT": 6,
+    conf["process_results"]["column_widths"].update({"Host": 11, "Str": 7, "RDB": 3, "PIF": 3, "QLEN": 6, "RC": 2, "RTT": 4, "D-PKT": 6, "B-PKT": 6,
                                                      "ITT": 6, "PS": 4, "Dur": 3, "soff": 4, "soff_t": 4, "soff_g": 4,
                                                      "gso_t": 5, "tso_t": 5, "gro_t": 5, "lro_t": 5,
                                                      "3rd Qu.": 6, "Test": 4, "bridge_rate" : 15, "clps-t": 6,
-                                                     "R_loss_stats": 28, "R_latency_stats": 30, "latency": 50,
-                                                     "BLoss": 5, "RLoss": 5 })
+                                                     "R_loss_stats": 35, "R_latency_stats": 30, "latency": 45,
+                                                     "BLoss": 8, "PLoss": 5 })
 
     def hostname_color(hostname, row_values):
         return colored(hostname, "yellow" if hostname == "ysender" else "white")
             if conf["file_parse"]["files_match_regex"]:
                 if match(basename, conf["file_parse"]["files_match_regex"]):
                     files_to_use.append(f)
+            else:
+                files_to_use.append(f)
     else:
         files_to_use = files
 
         basename = os.path.basename(f)
         #print "basename:", basename
         # If multiple regex are defined, use the first that matches
-        for i, regex in enumerate(regexes):
-            m = re.match(regex, basename, flags=re.DOTALL)
+        for i, regex_arg in enumerate(regexes):
+            regex, func = regex_arg
+            m = re.match(regex, basename, flags=re.DOTALL|re.VERBOSE)
             if m:
-                #print "Regex %d matched" % i
+                if func:
+                    file_conf = func(basename)
+                    #print "Regex %d matched" % i
+                else:
+                    file_conf = m.groupdict()
                 break
         if not m:
             print "No match for file '%s'!" % basename
             return
 
-        file_conf = m.groupdict()
         #print "GROUPDICT:", file_conf
         for k in file_conf:
-            if file_conf[k] is None:
-                continue
-            if file_conf[k].isdigit():
-                file_conf[k] = int(file_conf[k])
+            if type(file_conf[k]) is str:
+                if file_conf[k].isdigit():
+                    file_conf[k] = int(file_conf[k])
 
         # Calling the file parse func defined on plot config
         ret = plot_conf["file_parse"]["func"](plot_conf, file_conf, f, basename, arg)
         box_key = plot_conf["box_conf"]["key"] % file_conf
         set_key = plot_conf["set_conf"]["key"] % file_conf
 
-        def set_box_conf(plot_conf, arg):
+        def set_box_conf(plot_conf, ignore):
             if "parse_results_func" in plot_conf:
                 groups[box_key]["parse_results_func"] = plot_conf["parse_results_func"]
 
                     cmds.append(cmd)
                 groups[box_key]["custom_cmds"] = cmds
 
-
         if not box_key in groups:
             groups[box_key] = deepcopy(get_defaults_plot_box_conf())
             groups[box_key]["box_conf"] = plot_conf["box_conf"]
         #print
         #print "box_key:", box_key
         #print "set key:", set_key
+        file_conf["box_conf"] = groups[box_key]
 
         if "func" in plot_conf["box_conf"] and plot_conf["box_conf"]["func"]:
             plot_conf["box_conf"]["func"](groups[box_key], file_conf)
 
     # Sort the boxes within each group list
     group_list = []
+    print "Groups:", len(groups)
 
     for box in sorted(groups.values(), key=plot_conf["box_conf"]["sort_key_func"]):
         #print "TEST2:", box["sets"].keys()
     plot_conf["output_dir"] = args.output_dir
     plot_conf["force"] = args.force
 
-    files = get_files(args.directory, plot_conf)
+    files = []
+    for d in args.directories:
+        files += get_files(d, plot_conf)
+    print "Files:", files
     groups_list = parse_filenames(files, plot_conf)
     #print "groups_list:", groups_list
     #for f in files:
 
     if not conf is None:
         do_plots(args, conf)
+
+    if conf["view_results"]["execute"]:
+        name, extension = os.path.splitext(conf["output_file"])
+        print "extension:", extension
+        if conf["view_results"]["viewers"].get(extension, None) is None:
+            print "No viewer is configured for extension: '%s'", extension
+        else:
+            cmd = "%s %s" % (conf["view_results"]["viewers"][extension], conf["output_file"])
+            ret = call(cmd, shell=True)
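
The new per-connection regex in conn_info.py requires the colon after the connection tuple and captures the estimated-loss and packet columns. A hedged check of a regex of the same shape against the first sample line quoted in the hunk (illustrative; assumes the analyseTCP column layout shown there):

    import re

    line = ("10.0.0.13-12000-10.0.0.22-5001:            298             9.07 / 9.07"
            "  4278         3891            9.05 %          9.06 %        9.05 %")

    regex = (r"(?P<src_ip>\S+)-(?P<src_port>\S+)-(?P<dst_ip>\S+)-(?P<dst_port>\S+):\s+"
             r"(?P<duration>\d+)\s+(?P<est_loss>\S+\s/\s\S+)\s+(?P<packets_sent>\d+)\s+"
             r"(?P<packets_received>\d+)\s+(?P<packet_loss>\S+)\s%\s+"
             r"(?P<bytes_loss>\S+)\s%\s+(?P<ranges_loss>\S+)\s%")

    m = re.search(regex, line)
    print m.group("src_ip"), m.group("dst_port")          # 10.0.0.13 5001
    print m.group("packet_loss"), m.group("bytes_loss")   # 9.05 9.06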

File graph_base.py

     end_time = datetime.now()
     print "Finished creating plots in %s (at %s)" % (str((end_time - start_time)), str(end_time.strftime("%H:%M:%S")))
 
-def parse_args(argparser=None):
+def parse_args(argparser=None, defaults=None):
     global start_time
     start_time = datetime.now()
     if argparser is None:
         argparser = argparse.ArgumentParser(description="Create graphs")
 
-    argparser.add_argument("-d", "--directory",  help="The directory containing the pcap files.", required=False, default=".")
     argparser.add_argument("-od", "--output-dir",  help="The output directory to save the the csv results.", required=False, default="data_dir")
     argparser.add_argument("-f", "--force",  help="Force recreating the throughput CSV data files.", action='store_true', required=False, default=False)
     argparser.add_argument("-p", "--print-conf",  help="Pretty print the page/box/set/file config structure.", action='store_true', required=False, default=False)
     argparser.add_argument("-ci", "--connection-info-output-file",  help="Write connection info to the to specified file.", required=False, default=False)
     argparser.add_argument("-qd", "--queueing-delay-output-file",  help="Write queueing delay to specified file.", required=False, default=False)
     argparser.add_argument("-bn", "--burstiness-output-file",  help="Write burstiness values to specified file.", required=False, default=False)
+    argparser.add_argument("-vi", "--view-results",  help="Open the results file.", action='store_true', required=False, default=False)
+    argparser.add_argument("directories", help="The directories containing the pcap files.", nargs='+')
+
+    if defaults:
+        argparser.set_defaults(**defaults)
 
     args = argparser.parse_args()
     graph.args = args
 
-    if not args.directory:
-        args.directory = os.getcwd()
     if args.output_dir:
         if not os.path.isdir(args.output_dir):
             os.mkdir(args.output_dir)
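
parse_args() now takes the pcap directories as a positional nargs='+' argument and lets callers override defaults via set_defaults(). A minimal sketch of those two argparse features (flags reduced to the ones shown in the hunk; the override value is illustrative):

    import argparse

    argparser = argparse.ArgumentParser(description="Create graphs")
    argparser.add_argument("-od", "--output-dir", required=False, default="data_dir")
    argparser.add_argument("-vi", "--view-results", action='store_true', default=False)
    argparser.add_argument("directories", help="The directories containing the pcap files.", nargs='+')

    # Caller-supplied defaults override the ones given in add_argument().
    argparser.set_defaults(**{"output_dir": "results_dir"})

    args = argparser.parse_args(["dir_a", "dir_b"])
    print args.directories   # ['dir_a', 'dir_b']
    print args.output_dir    # results_dir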

File graph_default.py

 from util import cprint
 
 dst_ip = "10.0.0.22"
-host_ip_map = { 'rdbsender': "10.0.0.12", 'zsender': "10.0.0.13", "ysender": "10.0.0.14", "bridge2-eth2": "10.0.0.14" }
+host_ip_map = { 'rdbsender': "10.0.0.12", 'zsender': "10.0.0.13", "ysender": "10.0.0.14", "wsender": "10.0.0.15", "bridge2-eth2": "10.0.0.14" }
 host_stream_id_map = { 'rdbsender': {"rdb": "thin rdb%(packets_in_flight)s", "tcp": "thin tcp"} ,
                        'ysender': {"rdb": "thin rdb%(packets_in_flight)s", "tcp": "thin tcp"},
                        'bridge2-eth2': {"rdb": "thin rdb%(packets_in_flight)s", "tcp": "thin tcp"},
                        'zsender': {"rdb": "thick rdb(%(packets_in_flight)s)", "tcp": "thick tcp"} }
 
-host_stream_type_map = { 'rdbsender': "thin", 'ysender': "thin", 'zsender': "thick", "bridge2-eth2": "thin" }
+host_stream_type_map = { 'rdbsender': "thin", 'ysender': "thin", 'wsender': "thin", 'zsender': "thick", "bridge2-eth2": "thin" }
 
-host_stream_type_id_map = { 'rdbsender': "type_thin1", 'ysender': "type_thin1", 'zsender': "type_thick", "bridge2-eth2": "type_thin1" }
-host_stream_itt_id_map = { 'rdbsender': "itt_thin1", 'ysender': "itt_thin1", 'zsender': "itt_thick", "bridge2-eth2": "itt_thin1" }
-host_stream_payload_id_map = { 'rdbsender': "payload_thin1", 'ysender': "payload_thin1", 'zsender': "payload_thick", "bridge2-eth2": "payload_thin1" }
+host_stream_type_id_map = { 'rdbsender': "type_thin1", 'ysender': "type_thin2", 'wsender': "type_thin3", 'zsender': "type_thick", "bridge2-eth2": "type_thin1" }
+#host_stream_itt_id_map = { 'rdbsender': "itt_thin1", 'ysender': "itt_thin1", 'zsender': "itt_thick", "bridge2-eth2": "itt_thin1" }
+#host_stream_payload_id_map = { 'rdbsender': "payload_thin1", 'ysender': "payload_thin1", 'zsender': "payload_thick", "bridge2-eth2": "payload_thin1" }
 
 # Two thin streams
-host_stream_type_id_map2 = { 'rdbsender': "type_thin1", 'ysender': "type_thin2", 'zsender': "type_thick", "bridge2-eth2": "type_thin2" }
-host_stream_itt_id_map2 = { 'rdbsender': "itt_thin1", 'ysender': "itt_thin2", 'zsender': "itt_thick", "bridge2-eth2": "itt_thin2" }
-host_stream_payload_id_map2 = { 'rdbsender': "payload_thin1", 'ysender': "payload_thin2", 'zsender': "payload_thick", "bridge2-eth2": "payload_thin2" }
+#host_stream_type_id_map2 = { 'rdbsender': "type_thin1", 'ysender': "type_thin2", 'zsender': "type_thick", "bridge2-eth2": "type_thin2" }
+#host_stream_itt_id_map2 = { 'rdbsender': "itt_thin1", 'ysender': "itt_thin2", 'zsender': "itt_thick", "bridge2-eth2": "itt_thin2" }
+#host_stream_payload_id_map2 = { 'rdbsender': "payload_thin1", 'ysender': "payload_thin2", 'zsender': "payload_thick", "bridge2-eth2": "payload_thin2" }
 
 
 #dstPorts = { 'rdbsender': 5000, 'zsender': 5001, 'ysender': 5002, "bridge2-eth2": 5002 }
-dstPorts = { 'rdbsender': 5000, 'zsender': 5001, 'ysender': 5000 }
-dstPorts2 = { 'rdbsender': 5000, 'zsender': 5001, 'ysender': 5002 }
-set_order = {0: 1, 3: 2, 6: 3, 10: 4, 20: 5}
+dstPorts = { 'rdbsender': 5000, 'zsender': 5001, 'ysender': 5002, 'wsender': 5003 }
 
+set_order = {0: 1, 4: 2, 6: 3, 10: 4, 20: 5}
+
+hostname_id_map = {"rdbsender": "r",
+                   "ysender": "y",
+                   "wsender": "w",
+                   "zsender": "z"}
 
 color_map = {
     "default": "black",
     "thick": "darkred",
     "thick2": "blue",
     "thin": "darkgoldenrod1",
-    "tcp_pif0": "darkgoldenrod1",
     "rdb_pif3": "darkblue",
+    "rdb_pif4": "darkblue",
     "rdb_pif6": "cyan",
     "rdb_pif10": "darkgreen",
     "rdb_pif20": "darkorchid",
     "rdb_pif300": "darkorchid",
+    "rdb_pif200": "darkorchid",
     }
 
 def get_color(plot):
     pif_type = plot["type_id_pif"]
     if stream_id == "type_thick":
         return color_map["thick"]
-    return host_type_color_map["rdbsender"][pif_type]
+    return host_type_color_map[plot["hostname"]][pif_type]
+
 
+from collections import defaultdict as d_dict
+# defaultdict(default_factory, mapping, **kwargs): missing keys fall back to default_factory
 
 host_type_color_map = {
-    "rdbsender": dict(color_map, tcp=color_map["thin"]),
-    "zsender": { "default": "darkred", "tcp": "darkred", "rdb": "darkred" }
+    "rdbsender": d_dict(lambda: "darkblue", color_map, tcp=color_map["thin"]),
+    "ysender": d_dict(lambda: "darkgoldenrod1", {"tcp": "darkgoldenrod1"}),
+    "wsender": d_dict(lambda: "cyan", {"tcp": "cyan"}),
+    "zsender": { "default": "darkred", "tcp": "darkred", "rdb": "darkred"},
     }
 
 
     if not host in dstPorts:
         return False
 
+    file_conf["streams_id"] = hostname_id_map[file_conf["hostname"]]
+    if not file_conf["streams_id"] in file_conf["streams"]:
+        print "ID missing!"
+        if file_conf["streams_id"] == "r":
+            file_conf["streams_id"] = "t"
+        if file_conf["streams_id"] == "z":
+            file_conf["streams_id"] = "g"
+
     file_conf["pcap_file_receiver"] = pcap_file.replace("%s%s" % (file_conf["hostname"], plot_conf["file_parse"]["pcap_suffix"]), "zreceiver" + plot_conf["file_parse"]["pcap_suffix"])
 
     type_id = host_stream_type_id_map[host]
-    itt_id = host_stream_itt_id_map[host]
-    payload_id = host_stream_payload_id_map[host]
+    #itt_id = host_stream_itt_id_map[host]
+    #payload_id = host_stream_payload_id_map[host]
 
     file_conf["type_thick"] = "tcp"
     file_conf["dst_port"] = dstPorts[host]
         q_type = "bfifo"
     file_conf["queue_type"] = q_type
 
-    # We have two thin streams
-    if file_conf.get("type_thin2", None) is not None:
-        type_id = host_stream_type_id_map2[host]
-        itt_id = host_stream_itt_id_map2[host]
-        payload_id = host_stream_payload_id_map2[host]
-        file_conf["dst_port"] = dstPorts2[host]
+    ## We have two thin streams
+    #if file_conf.get("type_thin2", None) is not None:
+    #    type_id = host_stream_type_id_map2[host]
+    #    itt_id = host_stream_itt_id_map2[host]
+    #    payload_id = host_stream_payload_id_map2[host]
+    #    file_conf["dst_port"] = dstPorts2[host]
 
     file_conf["streams_str"] = "%(stream_count_thick)d vs %(stream_count_thin1)d" % file_conf
     file_conf["stream_type"] = host_stream_type_map[host]
 
-    file_conf["type"] = file_conf[type_id]
+    #file_conf["type"] = file_conf[type_id]
     #print "type_id:", type_id
     #print 'file_conf["type"]:', file_conf["type"]
     #print "Setting type for '%10s' with type_id: '%s' : %s" % (host, type_id, file_conf["type"])
+    #print "STREAMS:", file_conf["streams"]
 
     file_conf["type_id"] = type_id
-    file_conf["type_id_pif"] = "%s_pif%d" % (file_conf[type_id], file_conf["packets_in_flight"])
+    print "streams:", file_conf["streams"]
+    print "streams_id:", file_conf["streams_id"]
 
+    file_conf["type_id_pif"] = "%s_pif%d" % (file_conf["streams"][file_conf["streams_id"]]["type"], file_conf["packets_in_flight"])
+
+    file_conf["type"] = file_conf["streams"][file_conf["streams_id"]]["type"]
     file_conf["itt"] = ""
-    if itt_id in file_conf:
-        file_conf["itt"] = file_conf[itt_id]
+
+    if "itt" in file_conf["streams"][file_conf["streams_id"]]:
+        file_conf["itt"] = file_conf["streams"][file_conf["streams_id"]]["itt"]
 
     file_conf["payload"] = ""
     #print "KEYS:", file_conf.keys()
-    if payload_id in file_conf:
-        file_conf["payload"] = file_conf[payload_id]
+    #if payload_id in file_conf:
+    #    file_conf["payload"] = file_conf[payload_id]
 
-    file_conf["stream_id"] = host_stream_id_map[host][file_conf["type"]] % file_conf
+    #file_conf["stream_id"] = host_stream_id_map[host][file_conf["type"]] % file_conf
 
     if "cap" in file_conf:
         plot_conf["plot_conf"]["bandwidth"] = file_conf["cap"]
 
     file_conf["src_ip"] = host_ip_map[host]
     file_conf["dst_ip"] = dst_ip
-    file_conf["data_file_name"] = "%s-%s" % (basename.split(plot_conf["file_parse"]["pcap_suffix"])[0], file_conf["type_thin1"])
+    file_conf["data_file_name"] = "%s" % (basename.split(plot_conf["file_parse"]["pcap_suffix"])[0])
+    file_conf["common_prefix"] = "%s" % (basename.split(host)[0])
 
 
 def _get_default_conf():
-    default_conf = { "file_parse": {}, "plot_conf": {}}
+    default_conf = {"file_parse": {}, "plot_conf": {}}
     default_conf["file_parse"]["pcap_suffix"] = ".pcap"
     default_conf["file_parse"]["files_match_exp"] = "*%s" % default_conf["file_parse"]["pcap_suffix"]
     default_conf["file_parse"]["files_nomatch_regex"] = []
     default_conf["process_results"] = None
     default_conf["print_page_title"] = True
     default_conf["results"] = {}
+    default_conf["view_results"] = {"execute": False, "viewers": {".txt": "cat", ".pdf": "evince"} }
 
     default_conf["file_parse"]["files_match_regex"] = []
 
+    if graph.args.view_results:
+        default_conf["view_results"]["execute"] = True
+
     if graph.args.file_regex_match:
         default_conf["file_parse"]["files_match_regex"].append(graph.args.file_regex_match)
     if graph.args.file_regex_nomatch:
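
The host_type_color_map entries above are now defaultdicts: explicit entries and keyword overrides win, and any other type_id_pif key falls back to the per-host default_factory color. A small sketch of that lookup behavior (illustrative keys):

    from collections import defaultdict as d_dict

    color_map = {"thin": "darkgoldenrod1", "rdb_pif6": "cyan"}
    rdbsender_colors = d_dict(lambda: "darkblue", color_map, tcp=color_map["thin"])

    print rdbsender_colors["tcp"]        # darkgoldenrod1 (keyword override)
    print rdbsender_colors["rdb_pif6"]   # cyan (explicit entry)
    print rdbsender_colors["rdb_pif4"]   # darkblue (default_factory fallback)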

File graph_default_bendik.py

 
 
 #dstPorts = { 'rdbsender': 5000, 'zsender': 5001, 'ysender': 5002, "bridge2-eth2": 5002 }
-dstPorts = { 'rdbsender': 5000, 'zsender': 5001, 'ysender': 5000 }
-dstPorts2 = { 'rdbsender': 5000, 'zsender': 5001, 'ysender': 5002 }
+dstPorts = { 'rdbsender': 5000, 'zsender': 5001, 'ysender': 5002, 'wsender': 5003 }
 set_order = {0: 1, 3: 2, 6: 3, 10: 4, 20: 5}
 
 
     "(cap_(?P<cap>\d+)kbit_)?"\
     "duration_(?P<duration>\d+)m_payload_(?P<payload_thin1>\d+)_itt_(?P<itt_thin1>\d+)_rtt_(?P<rtt>\d+)_"\
     "loss_(?P<loss>.*)_p_in_flight_(?P<packets_in_flight>\d+)(_queue_len_(?P<queue_len>\d+))?(_delay_(?P<delay_type>[a-z]+))?(_num_(?P<num>\d+))?(?P<extra>.+)?_(?P<hostname>.+)\."
-filename_regexes.append(filename_regex)
+filename_regexes.append((filename_regex, None))
 
 # 60_thin_tcp_vs_60_thick_stream_cap_10000kbit_duration_20m_payload_120_itt_100_rtt_150_loss__p_in_flight_0_queue_len_122_delay_fixed_num_1_clps_thin_1_clps_thick_0_segoff_off_segoff_thin_on_segoff_thick_off_brate_10_duplex_half_zsender.pcap
 #60_thin_tcp_vs_60_thick_stream_cap_10000kbit_duration_20m_payload_120_itt_100_rtt_150_loss__p_in_flight_0_queue_len_122_delay_fixed_num_0_clps_thin_1_clps_thick_0_segoff_off_segoff_thin_on_segoff_thick_off_zsender.pcap
     "(cap_(?P<cap>\d+)kbit_)?"\
     "duration_(?P<duration>\d+)m_payload_(?P<payload_thin1>\d+)_itt_(?P<itt_thin1>\d+)_rtt_(?P<rtt>\d+)_"\
     "loss_(?P<loss>[^_]*)_p_in_flight_(?P<packets_in_flight>\d+)(_queue_len_(?P<queue_len>\d+))?(_delay_(?P<delay_type>[^_]+))?(_num_(?P<num>\d+))?(?P<extra>.+)?_(?P<hostname>[^_]+)\."
-filename_regexes.append(filename_regex)
+filename_regexes.append((filename_regex, None))
 
 #8_thin_rdb_vs_8_thick_stream_cap_5000kbit_duration_15m_payload_100_itt_50_rtt_100_loss__p_in_flight_6_queue_len_41_num_0_zreceiver.pcap
 #32_thin_tcp_vs_32_cap_2000kbit_thick_stream_duration_20m_payload_100_itt_100_rtt_100_loss__p_in_flight_0_num_5_rdbsender.pcap
     "(cap_(?P<cap>\d+)kbit_)?"\
     "duration_(?P<duration>\d+)m_payload_(?P<payload_thin1>\d+)_itt_(?P<itt_thin1>\d+)_rtt_(?P<rtt>\d+)_"\
     "loss_(?P<loss>.*)_p_in_flight_(?P<packets_in_flight>\d+)(_queue_len_(?P<queue_len>\d+))?(_delay_(?P<delay_type>[a-z]+))?(_num_(?P<num>\d+))?_(?P<hostname>.+)\."
-filename_regexes.append(filename_regex)
+filename_regexes.append((filename_regex, None))
 
 filename_regex = "(?P<streams>t(?P<stream_count_thin1>\d+)-(?P<type>[^_]+)?_vs_g(?P<stream_count_thick>\d+))_kbit(?P<cap>\d+)_min(?P<duration>\d+)_ps(?P<payload_thin1>\d+)_itt(?P<itt_thin1>\d+)_rtt(?P<rtt>\d+)_loss_pif(?P<packets_in_flight>\d+)_(?:qlen(?P<queue_len>\d+))_(?:delay(?P<delay_type>[^_]+))_(num(?P<num>\d+))?(?P<extra>.+)?_(?P<hostname>[^_]+)\."
-filename_regexes.append(filename_regex)
+filename_regexes.append((filename_regex, None))
 
 #t60-tcp_vs_g60_kbit10000_min20_ps120_itt100_rtt150_loss_pif0_qlen122_delayfixed_num0_clps-t-0_clps-g-0_soff-off_soff-t-0_soff-g-0_brate-speed-10-duplex-full_bridge2-eth1.pcap
 filename_regex = "(?P<streams>t(?P<stream_count_thin1>\d+)-(?P<type_thin1>[^-]+)-itt(?P<itt_thin1>[^-]+)-ps(?P<payload_thin1>[^-]+)(:?_vs_t(?P<stream_count_thin2>\d+)-(?P<type_thin2>[^-]+)-itt(?P<itt_thin2>[^-]+)-ps(?P<payload_thin2>[^-]+))?_vs_g(?P<stream_count_thick>\d+))_kbit(?P<cap>\d+)_min(?P<duration>\d+)_rtt(?P<rtt>\d+)_loss_pif(?P<packets_in_flight>\d+)_(?:qlen(?P<queue_len>\d+))_(?:delay(?P<delay_type>[^_]+))_(num(?P<num>\d+))?(?P<extra>.+)?_(?P<hostname>[^_]+)\."
-filename_regexes.append(filename_regex)
+filename_regexes.append((filename_regex, None))
 
 #t1-rdb-itt1-ps450-ccvegas_vs_g2_kbit5000_min10_rtt150_loss_pif300_qlen30_delayfixed_num0_rdbsender.pcap
 #filename_regex = "(?P<streams>t(?P<stream_count_thin1>\d+)-(?P<type_thin1>[^-]+)-itt(?P<itt_thin1>[^-]+)-ps(?P<payload_thin1>[^-]+)-cc(?P<cong_control_thin1>[^-]+)(:?_vs_t(?P<stream_count_thin2>\d+)-(?P<type_thin2>[^-]+)-itt(?P<itt_thin2>[^-]+)-ps(?P<payload_thin2>[^-]+))?_vs_g(?P<stream_count_thick>\d+))_kbit(?P<cap>\d+)_min(?P<duration>\d+)_rtt(?P<rtt>\d+)_loss_pif(?P<packets_in_flight>\d+)_(?:qlen(?P<queue_len>\d+))_(?:delay(?P<delay_type>[^_]+))_(num(?P<num>\d+))?(?P<extra>.+)?_(?P<hostname>[^_]+)\."
 #filename_regexes.append(filename_regex)
 
+
+def parse_elements(elements):
+    conf = {"cong_control": "cubic",
+    }
+    def parse(regex, e):
+        m = re.match(regex, e, flags=re.DOTALL|re.VERBOSE)
+        if m:
+            conf.update(m.groupdict())
+        return m is not None
+
+    conf["type"] = "tcp"
+    parse("(?P<id>.)(?P<stream_count>\d+)", elements.pop(0))
+
+    if elements:
+        # Stream type is special case since it has no prefix
+        if elements[0] == "rdb" or elements[0] == "tcp":
+            conf["type"] = elements.pop(0)
+        properties_regexes = ["itt(?P<itt>[^-]+)",
+                              "ps(?P<payload>[^-]+)",
+                              "rc(?P<retrans_collapse>[^-]+)",
+                              "cc(?P<cong_control>[^-]+)",
+                              "da(?P<thin_dupack>[^-]+)",
+                              "lt(?P<linear_timeout>[^-]+)",
+                              "pif(?P<packets_in_flight>[^-]+)"
+                          ]
+        while elements:
+            e = elements.pop()
+            for reg in properties_regexes:
+                if parse(reg, e):
+                    break
+    # Convert ints
+    for k in conf:
+        if type(conf[k]) is str:
+            if conf[k].isdigit():
+                conf[k] = int(conf[k])
+    return conf
+
+#t21-tcp-itt100-ps120-cccubic-da1-lt1_vs_g10_kbit5000_min5_rtt150_loss_pif0_qlen30_delayfixed_num0_zreceiver.pcap
+def parse_fname(fname):
+    if fname.find("..") == -1:
+        i = fname.find("_kbit")
+        streams = fname[:i]
+        common = fname[i+1:]
+    else:
+        streams, common = fname.split("..")
+    streams = streams.split("_vs_")
+    #print "streams:", streams
+    #print "common:", common
+
+    #kbit5000_min5_rtt150_loss_pif0_qlen30_delayfixed_num0_zreceiver.pcap
+    common_regex = "kbit(?P<cap>\d+)_min(?P<duration>\d+)_rtt(?P<rtt>\d+)_loss_pif(?P<packets_in_flight>\d+)_(?:qlen(?P<queue_len>\d+))_(?:delay(?P<delay_type>[^_]+))_(num(?P<num>\d+))?(?P<extra>.+)?_(?P<hostname>[^_]+)\."
+    regexes = ["r(?P<stream_count_thin1>\d+)-(?P<type_thin1>[^-]+)-itt(?P<itt_thin1>[^-]+)-ps(?P<payload_thin1>[^-]+)-cc(?P<cong_control_thin1>[^-]+)(-da(?P<thin_dupack>[^-]+))?(-lt(?P<linear_timeout>[^-]+))",
+               "t(?P<stream_count_thin1>\d+)-(?P<type_thin1>[^-]+)-itt(?P<itt_thin1>[^-]+)-ps(?P<payload_thin1>[^-]+)-cc(?P<cong_control_thin1>[^-]+)(-da(?P<thin_dupack>[^-]+))?(-lt(?P<linear_timeout>[^-]+))",
+               "y(?P<stream_count_thin2>\d+)-(?P<type_thin2>[^-]+)-itt(?P<itt_thin2>[^-]+)-ps(?P<payload_thin2>[^-]+)",
+               "z(?P<stream_count_thick>\d+)",
+               "g(?P<stream_count_thick>\d+)"]
+
+
+    #t6-rdb-itt10-ps400-ccrdb-da0-lt0', 'y6-tcp-itt10-ps400', 'z6
+
+    m = re.match(common_regex, common, flags=re.DOTALL|re.VERBOSE)
+    file_conf = m.groupdict()
+
+    file_conf["streams"] = {}
+    for s in streams:
+        #print "S:", s
+        elements = s.split("-")
+        conf = parse_elements(elements)
+        #print "conf:", conf
+        file_conf["streams"][conf["id"]] = conf
+        for reg in regexes:
+            #print "reg:", reg
+            m = re.match(reg, s, flags=re.DOTALL|re.VERBOSE)
+            if m:
+                file_conf.update(m.groupdict())
+                break
+    return file_conf
+
 #t1-rdb-itt1-ps450-ccvegas_vs_g2_kbit5000_min10_rtt150_loss_pif300_qlen30_delayfixed_num0_rdbsender.pcap
 filename_regex = "(?P<streams>t(?P<stream_count_thin1>\d+)-(?P<type_thin1>[^-]+)-itt(?P<itt_thin1>[^-]+)-ps(?P<payload_thin1>[^-]+)-cc(?P<cong_control_thin1>[^-]+)(-da(?P<thin_dupack>[^-]+))?(-lt(?P<linear_timeout>[^-]+))?(:?_vs_t(?P<stream_count_thin2>\d+)-(?P<type_thin2>[^-]+)-itt(?P<itt_thin2>[^-]+)-ps(?P<payload_thin2>[^-]+))?_vs_g(?P<stream_count_thick>\d+))_kbit(?P<cap>\d+)_min(?P<duration>\d+)_rtt(?P<rtt>\d+)_loss_pif(?P<packets_in_flight>\d+)_(?:qlen(?P<queue_len>\d+))_(?:delay(?P<delay_type>[^_]+))_(num(?P<num>\d+))?(?P<extra>.+)?_(?P<hostname>[^_]+)\."
-filename_regexes.append(filename_regex)
+filename_regexes.append((filename_regex, parse_fname))
+
+filename_regex = ".*kbit(?P<cap>\d+)_min(?P<duration>\d+)_rtt(?P<rtt>\d+)_loss_pif(?P<packets_in_flight>\d+)_(?:qlen(?P<queue_len>\d+))_(?:delay(?P<delay_type>[^_]+))_(num(?P<num>\d+))?(?P<extra>.+)?_(?P<hostname>[^_]+)\."
+
+#filename_regexes.append((filename_regex, parse_fname))
+#t21-tcp-itt100-ps120-cccubic-da1-lt1_vs_g10_kbit5000_min5_rtt150_loss_pif0_qlen30_delayfixed_num0_zreceiver.pcap
+
+
+filename_regexes.append((filename_regex, parse_fname))
+
+#t6-rdb-itt10-ps400-ccrdb-da0-lt0_vs_y6-tcp-itt10-ps400_vs_z6..kbit5000_min2_rtt150_loss_pif200_qlen30_delayfixed_num0_rdbsender.pcap
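
A hedged usage sketch for parse_fname() on the sample filename in the comment above (assumes parse_elements/parse_fname as defined in this hunk; expected values shown in comments):

    fname = ("t6-rdb-itt10-ps400-ccrdb-da0-lt0_vs_y6-tcp-itt10-ps400_vs_z6.."
             "kbit5000_min2_rtt150_loss_pif200_qlen30_delayfixed_num0_rdbsender.pcap")
    conf = parse_fname(fname)
    print conf["hostname"]                                           # rdbsender
    print conf["streams"]["t"]["type"], conf["streams"]["t"]["itt"]  # rdb 10
    print conf["streams"]["y"]["payload"]                            # 400
    print conf["streams"]["z"]["stream_count"]                       # 6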
+
+
+def get_latency_conf():
+    print "get_latency_conf"
+    from latency import get_latency_conf as get_latency_conf_original
+    import latency
+    conf = get_latency_conf_original()
+    conf["box_conf"] = { "key": "Streams:%(stream_count_thin1)s_Streams_Greedy:%(stream_count_thick)s_Payload:%(payload_thin1)s_ITT:%(itt_thin1)s_RTT:%(rtt)s_PIFG:%(group)s_%(cong_control_thin1)s_%(duration)s_%(thin_dupack)s_%(linear_timeout)s",
+                         "sort_key_func": lambda x: (x["stream_count_thin1"], x["stream_count_thick"], x["group"], x["itt_thin1"]),
+                         "sort_keys": ["stream_count_thin1", "stream_count_thick", "group", "itt_thin1"],
+                         "func" : latency.latency_box_key_func,
+                         "latency_options": {"per_stream": False},
+                         "box_title_def" : "Thin count: %(stream_count_thin1)s Greedy: %(stream_count_thick)s  ITT: %(itt_thin1)sms" }
+    conf["set_conf"] = { "key": "Streams:%(stream_count_thin1)s_Payload:%(payload_thin1)s_ITT:%(itt_thin1)s_RTT:%(rtt)s_PIFG:%(group)s" }
+    conf["page_group_def"] = {#"title": "Payload: %(payload_thin1)s  RTT: %(rtt)sms  ITT: %(itt_thin1)sms  Queue Len: %(queue_len)s  Duration: %(duration)s min",
+        "title": "%(stream_count_thin1)d Thin streams vs %(stream_count_thick)d Greedy streams - Duration: %(duration)s min\n"
+        "Payload thin streams: %(payload_thin1)s   RTT: %(rtt)sms   ITT: %(itt_thin1)sms   Queue Len: %(queue_len)s packets",
+        "sort_keys": ["payload_thin1", "rtt", "stream_count_thin1"], "sort_key_func": lambda x: (x[0], x[1], x[2])}
+    conf["plot_conf"].update({"n_columns": 1, "n_rows" : 1, "x_axis_lim" : [150, 1200], "y_axis_lim" : [.5, 1],
+                              "y_axis_title" : "Percentage of ACK Latencies",
+                              "x_axis_title" : {
+                "title": ["ACK Latency in milliseconds"],
+                },
+                              "plot_func": plot_ecdf_box,
+                              "legend": { "color_func": latency.get_color_latency, "values": {"type_thick": "Greedy", "type_thin1": "Thin", "type_thin2": "Thin2", "type_thin3": "Thin3"}},
+                              #"box_commands": thin_itt_mark_line_func,
+                              "r.plot_args": {"cex.main": 0.9 },
+                              "r.mtext_page_title_args": {"cex.main": 1.2 },
+                              }
+                             )
+    conf["paper"] = "special"
+    conf["paper_width"] = 10
+    conf["paper_height"] = 8
+    return conf
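+
+# Typical usage (cf. the plot_* scripts further down): install this config
+# before running the generic plotting entry point:
+#   import latency
+#   latency.get_conf = get_latency_conf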
 
 def get_default_conf():
     from graph_default import _get_default_conf as get_conf
             args = {"ylim": IntVector(conf["plot_conf"]["y_axis_lim"]), "xlim": x_axis_lim, "xaxt": "n", "xlab": "", "yaxt": "n",
                     "border": StrVector([plot["color"]])}
             if "column" in plot:
-                args["at"] = IntVector([plot["column"]])
-                plot_labels[plot["column"]] = plot["label"]
+                col = plot["column"](plot)
+                args["at"] = IntVector([col])
+                plot_labels[col] = plot["label"]
 
             r.boxplot(data, main=plot_box_dict["box_title"], add=add, varwidth=True, **args)
             add = True
             #print "type_id_pif:", plot["type_id_pif"]
             #print 'plot[plot["type_id"]]', plot[plot["type_id"]]
             if "legend" in conf["plot_conf"]:
+                #print "Values:", conf["plot_conf"]["legend"]["values"]
+                #print 'plot["type_id"]:', plot["type_id"]
                 legend_attr[conf["plot_conf"]["legend"]["values"][plot["type_id"]]] = conf["plot_conf"]["legend"]["color_func"](plot)
                 plot["color"] = conf["plot_conf"]["legend"]["color_func"](plot)
                 legend_colors.append(plot["color"])
                 legend_labels.append(conf["plot_conf"]["legend"]["values"][plot["type_id"]])
 
             data = plot["ecdf_values"]
             if not type(data) is list:
                 data = [data]
 
+            print "Data:", len(data)
             for i, d in enumerate(data):
                 label_curves = r('list(method="arrow", cex=.8)')
                 args = plot_box_dict["r.plot_args"]
                 if "y_axis_lim" in plot_box_dict:
                     args["ylim"] = FloatVector(plot_box_dict["y_axis_lim"])
 
                 p = Ecdf(d,
                          #group=group,
                          #label_curves=label_curves,

File graph_r_ggplot.py

 import graph
 from util import cprint
 from collections import OrderedDict
+from util import frange
 
 plyr = importr("plyr")
 zoo = importr("zoo")
 scales = importr("scales")
 RColorBrewer = importr("RColorBrewer")
 
-def scatterplot_cwnd_thick(conf, plot_box_dict, plot, gp, table_data, colours, color_labels, aes_mapping):
-    data = plot["data"]
+def do_stream_custom_data_and_color(plot, plot_box_dict, data, table_data, color_conf, stream_properties):
     counts = plyr.count(data, vars="type_id")
     streams_count = plyr.count(data, vars="stream")
-
-    #d = counts.rx(counts.rx2("type_id").ro != "type_thin1", True).rx2(2)
-    table_data["data_points"].append(counts[1][0] if counts else rpy2.rinterface.NA_Integer)
-
-    #data_subset_thick = data.rx(data.rx2("type_id").ro == "type_thick", True)
+    streams = streams_count.rx2("stream")
+    breaks = list(streams.iter_labels())
+
+    table_data["Data points"].append(counts[1][0] if counts else rpy2.rinterface.NA_Integer)
+    table_data["Name"].append(stream_properties["legend_name"])
+
+    def make_colors(name, code, count):
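+        # Recycle the brewer palette until it covers all `count` streams,
+        # then label the entries "<name> 1".."<name> N"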
+        colors = r(code)
+        while len(colors) < count:
+            colors += colors
+        colors = colors[:count]
+        color_labels = ["%s %d" % (name, i+1) for i in range(0, count)]
+        return colors, color_labels
+
+    colors, labels = make_colors(stream_properties["legend_name"], stream_properties["color_code"], stream_properties["stream_count"])
+    color_conf["colors"] += colors
+    color_conf["color_labels"] += labels
+    color_conf["breaks"] += breaks
+
+    stream_properties = plot["streams"][plot["streams_id"]]
+    if "table_data_keys" in plot_box_dict:
+        data_keys = plot_box_dict["table_data_keys"]
+        for data_key in data_keys:
+            if data_keys[data_key].get("location", None) == "table_data":
+                continue
+            value = rpy2.rinterface.NA_Integer if data_keys[data_key]["type"] == "int" else rpy2.rinterface.NA_Character
+            key = data_keys[data_key]["key"]
+            if key in stream_properties:
+                value = stream_properties[key]
+            table_data[data_key].append(value)
+
+def scatterplot_cwnd_thick(conf, plot_box_dict, plot, gp, table_data, color_conf, aes_mapping):
+    data = plot["data"]
+    stream_properties = plot["streams"][plot["streams_id"]]
+    do_stream_custom_data_and_color(plot, plot_box_dict, data, table_data, color_conf, stream_properties)
 
     average_window = 80
     ma = zoo.rollmean(data.rx2("cwnd"), average_window, fill=IntVector([rpy2.rinterface.NA_Integer for i in range(average_window)]))
     #print "MA LEN:", len(ma)
-    data[2] = ma
-    #print "cwnd LEN:", len(data.rx2("cwnd"))
+    #data[2] = ma
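+    # NB: ma is computed above but no longer assigned, so the raw (unsmoothed)
+    # cwnd values are plotted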
 
     if gp is None:
         gp = ggplot2.ggplot(data)
-    gp += aes_mapping
+        gp += aes_mapping
+
     gp += ggplot2.geom_line(data=data)
+    gp += ggplot2.geom_point(data=data)
     return gp
 
-
-def scatterplot_cwnd_thin(conf, plot_box_dict, plot, gp, table_data, colours, color_labels, aes_mapping):
+def scatterplot_cwnd_thin(conf, plot_box_dict, plot, gp, table_data, color_conf, aes_mapping):
     data = plot["data"]
-    counts = plyr.count(data, vars="type_id")
-    streams_count = plyr.count(data, vars="stream")
-
-    #print "COLS:", data.names
-    #print "LEVELS:", r.levels(data[1])
-    #data = data.rx(data.rx2("type_id").ro == "type_thin1", True)
-    #d = counts.rx(counts.rx2("type_id").ro == "type_thin1", True).rx2(2)
-
-    table_data["data_points"].append(counts[1][0] if counts else rpy2.rinterface.NA_Integer)
+    stream_properties = plot["streams"][plot["streams_id"]]
+    do_stream_custom_data_and_color(plot, plot_box_dict, data, table_data, color_conf, stream_properties)
 
     if gp is None:
         gp = ggplot2.ggplot(data)
+        gp += aes_mapping
 
-    gp += aes_mapping
-
-    if plot["stream_count_thin1"] >= 1:
+    if stream_properties["stream_count"] >= 1:
         gp += ggplot2.geom_line(data=data)
         gp += ggplot2.geom_point(data=data)
         pass
     else:
-        smooth_line_groups = "interaction(factor(stream), factor(type))"
+        #smooth_line_groups = "interaction(factor(stream), factor(type))"
+        smooth_line_groups = "interaction(factor(stream))"
         gp += ggplot2.stat_smooth(data=data,
                                   method='loess', formula=Formula("y ~ x"), size=0.6, span=0.09, n=80,
                                   mapping=ggplot2.aes_string(x='time', y='cwnd', group=smooth_line_groups,
-                                                             #col='factor(stream)'
-                                                             #col='factor(stream, levels=c(%s))' % levels_thin
-                                                             #,linetype='stream'
-                                                             )
+                                                         )
                                   ,se=False
                                   #,color=scale_alpha_color2
                                   )
     #gp = ggplot2.ggplot(mapping=ggplot2.aes_string(x='time', y='cwnd', col='factor(stream)'))
     gp = None
     data = None
-    table_data = {"data_points": []}
+    table_data = {"Data points": list()}
     table_data["Bytes Loss"] = [""]
+    table_data["Name"] = []
+    if "table_data_keys" in plot_box_dict:
+        table_data.update({key: list() for key in plot_box_dict["table_data_keys"]})
+
     print "box_title:", plot_box_dict["box_title"]
 
     mapping = ggplot2.aes_string(x='time', y='cwnd', col='factor(stream)')
-
-    def get_colours(plot):
-        # Fix colors
-        thick_colours = r('c(brewer.pal(9, "Reds")[3:8])')
-        thin_colours = r('c(brewer.pal(9, "Blues")[3:8])')
-
-        while len(thin_colours) < plot["stream_count_thin1"]:
-            thin_colours += thin_colours
-        while len(thick_colours) < plot["stream_count_thick"]:
-            thick_colours += thick_colours
-
-        thin_colours = thin_colours[:plot["stream_count_thin1"]]
-        thick_colours = thick_colours[:plot["stream_count_thick"]]
-        colours = thin_colours + thick_colours
-        color_labels = ["Thin %d" % (i+1) for i in range(0, plot["stream_count_thin1"])]
-        color_labels += ["Greedy %d" % (i+1) for i in range(0, plot["stream_count_thick"])]
-
-        return colours, color_labels
+    color_conf = {"colors": [], "color_labels": [], "breaks": []}
 
     for plot_set in sets.itervalues():
         for plot in plot_set["plots"]:
-            colours, color_labels = get_colours(plot)
-
-            # Limit data based on time on x axis
             data = plot["data"]
+            #print "stream_type:", plot["stream_type"]
+            # Limit data based on time on x axis
             plot["data"] = data.rx((data.rx2("time").ro < x_axis_time_lim_max).ro & (data.rx2("time").ro > x_axis_time_lim_min), True)
-            if plot["stream_type"] == "thin":
-                gp = scatterplot_cwnd_thin(conf, plot_box_dict, plot, gp, table_data, colours, color_labels, mapping)
+            if plot["stream_type"] != "thick":
+                gp = scatterplot_cwnd_thin(conf, plot_box_dict, plot, gp, table_data, color_conf, mapping)
             else:
-                gp = scatterplot_cwnd_thick(conf, plot_box_dict, plot, gp, table_data, colours, color_labels, mapping)
-
-    gp += ggplot2.ggplot2.scale_colour_manual(values=colours
+                gp = scatterplot_cwnd_thick(conf, plot_box_dict, plot, gp, table_data, color_conf, mapping)
+
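+    # Build a named R character vector mapping each stream id (legend break)
+    # to its assigned color for scale_colour_manual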
+    color_values = ""
+    breaks = color_conf["breaks"]
+    colors = color_conf["colors"]
+    for i in range(len(breaks)):
+        b = breaks[i]
+        c = colors[i]
+        if color_values:
+            color_values += ", "
+        color_values += '"%s" = "%s"' % (b, c)
+
+    color_values = r('c(%s)' % color_values)
+    gp += ggplot2.ggplot2.scale_colour_manual(values=color_values
                                               , name="Streams"
-                                              #, breaks=StrVector(("One", "Two", "Three"))
-                                              #, breaks=StrVector(("10.0.0.13:12000-10.0.0.22:5001", "10.0.0.13:12001-10.0.0.22:5001", "10.0.0.12:22000-10.0.0.22:5000"))
-                                              , labels=StrVector(color_labels)
+                                              , labels=StrVector(color_conf["color_labels"])
+                                              , breaks=StrVector(color_conf["breaks"])
                                               #, palette="Red"
                                               #, guide="colourbar"
                                               )
     gp += ggplot2.ggtitle(plot_box_dict["box_title"])
     gp += r.xlab("Time in seconds")
     gp += r.ylab("Congestion window size")
-    gp += ggplot2.scale_x_continuous(breaks=IntVector(range(x_axis_time_lim_min, x_axis_time_lim_max +1, 100))
-                                     #,minor_breaks=seq(50,60,by=2)
-                                     ,limits=IntVector((x_axis_time_lim_min, x_axis_time_lim_max))
-                                     )
-    gp += ggplot2.theme(**{"legend.position": "right",
-                           #"axis.title.y": ggplot2.element_blank()
-                           })
-    gp += ggplot2.guides(color=ggplot2.guide_legend(ncol=1 + (len(colours) // 14))) #, title.hjust = 0.4, title.theme = theme_text(size = 12, face = "bold")))
-
-    custom_data = OrderedDict([("Type", StrVector((plot["type_thin1"], plot["type_thick"]))),
-                               ("Streams", IntVector((plot["stream_count_thin1"], plot["stream_count_thick"]))),
-                               ("Cong", StrVector((plot["cong_control_thin1"], "cubic"))),
-                               ("PIF Lim", IntVector((plot["packets_in_flight"], rpy2.rinterface.NA_Integer))),
-                               ("Data points", IntVector(table_data["data_points"])),
-                               ("ITT", StrVector((plot["itt_thin1"], rpy2.rinterface.NA_Character))),
-                               ("Payload", IntVector((plot["payload_thin1"], rpy2.rinterface.NA_Integer))),
-                               ("Bytes Loss %", StrVector(table_data["Bytes Loss"])),
-                               ])
-
-    columns_data = DataFrame(custom_data)
+
+    if "x_axis_lim" in plot_box_dict:
+        args = {}
+        args["limits"] = IntVector((x_axis_time_lim_min, x_axis_time_lim_max))
+        if "x_axis_breaks" in conf["plot_conf"]:
+            args["breaks"] = FloatVector(frange(x_axis_time_lim_min, x_axis_time_lim_max +1, conf["plot_conf"]["x_axis_breaks"]))
+        if "x_axis_minor_breaks" in conf["plot_conf"]:
+            args["minor_breaks"] = FloatVector(frange(x_axis_time_lim_min, x_axis_time_lim_max +1, conf["plot_conf"]["x_axis_minor_breaks"]))
+        gp += ggplot2.scale_x_continuous(**args)
+
+    if "y_axis_lim" in plot_box_dict:
+        y_axis_time_lim_min, y_axis_time_lim_max = plot_box_dict["y_axis_lim"]
+        args = {}
+        args["limits"] = IntVector((y_axis_time_lim_min, y_axis_time_lim_max))
+        if "y_axis_breaks" in conf["plot_conf"]:
+            args["breaks"] = FloatVector(frange(y_axis_time_lim_min, y_axis_time_lim_max +1, conf["plot_conf"]["y_axis_breaks"]))
+        if "y_axis_minor_breaks" in conf["plot_conf"]:
+            args["minor_breaks"] = FloatVector(frange(y_axis_time_lim_min, y_axis_time_lim_max +1, conf["plot_conf"]["y_axis_minor_breaks"]))
+        gp += ggplot2.scale_y_continuous(**args)
+
+    theme_args = {"legend.position": "right",
+                  #"axis.title.y": ggplot2.element_blank()
+                  #"panel.grid.major": ggplot2.ggplot2.element_line(size=2)
+    }
+    if "theme_args" in conf["plot_conf"]:
+        theme_args.update(conf["plot_conf"]["theme_args"])
+
+    gp += ggplot2.theme(**theme_args)
+    gp += ggplot2.guides(color=ggplot2.guide_legend(ncol=1 + (len(colors) // 14))) #, title.hjust = 0.4, title.theme = theme_text(size = 12, face = "bold")))
+
+    custom_data = []
+    if "table_data_keys" in plot_box_dict:
+        data_keys = plot_box_dict["table_data_keys"]
+        for data_key in data_keys:
+            vector = IntVector if data_keys[data_key]["type"] == "int" else StrVector
+            try:
+                custom_data.append((data_key, vector(table_data[data_key])))
+            except ValueError, e:
+                print "data_key:", data_key
+                print "Data:", table_data[data_key]
+                raise
+
+    custom_data = OrderedDict(custom_data)
     column_names = custom_data.keys()
-    table = gridExtra.tableGrob(columns_data, cols=StrVector(column_names), name="TABLE")
+    table = gridExtra.tableGrob(DataFrame(custom_data), cols=StrVector(column_names), name="TABLE")
     plot_and_table = gridExtra.arrangeGrob(gp, table, nrow=2, ncol=1, heights=FloatVector((10, 3)), lengths=FloatVector((10, 3)))
     return plot_and_table
 
     gp += ggplot2.theme(
         **{"legend.position": "left", #"axis.title.y": ggplot2.element_blank()
            })
-    gp += ggplot2.scale_colour_identity(guide="legend", name="Stream types"
+    gp += ggplot2.scale_colour_identity(guide="legend", name="Stream types2"
                                         , breaks=StrVector(("darkblue", "darkred"))
                                         , labels=StrVector(("Thin", "Greedy"))
                                         )
 
 
 def plot_ecdf_box_ggplot(conf, plot_box_dict, sets):
-
     items_to_plot = []
     title = plot_box_dict["box_title"]
     # Column 0 should show the y axis labels
             #     q=FloatVector(percentiles),
             #     )
 
-            print "data:", type(data)
-
             gp = ggplot2.ggplot(data)
             pp = (gp
                   + ggplot2.ggtitle(plot_box_dict["box_title"])
 
 
 
+def ggplot_ecdf(conf, plot_box_dict, plot, gp):
+    #data = plot["data"]
+    #
+    #thick_colours = r('c(brewer.pal(9, "Reds")[3:8])')
+    #thin_colours = r('c(brewer.pal(9, "Blues")[3:8])')
+    #
+    #x_axis_time_lim_min, x_axis_time_lim_max = plot_box_dict["x_axis_lim"]
+    #data = data.rx((data.rx2("X").ro < x_axis_time_lim_max).ro & (data.rx2("X").ro > x_axis_time_lim_min), True)
+
+    #print "ggplot_ecdf_box - color:", plot["color"]
+
+    title = plot_box_dict["box_title"]
+    data = plot["ecdf_ggplot_values"]
+
+    if gp is None:
+        gp = ggplot2.ggplot(data)
+
+    gp = (gp
+          + ggplot2.aes_string(x='Y', col='factor(color_column)')
+          + ggplot2.ggplot2.stat_ecdf(data=data)
+          #+ ggplot2.guides(color=ggplot2.guide_legend(ncol=1 + (len(colours) // 14))) #, title.hjust = 0.4, title.theme = theme_text(size = 12, face = "bold")))
+          )
+    return gp
+
+
+def ggplot_cdf_box(conf, plot_box_dict, sets):
+    """
+    Creates an ECDF plot of the values for each plot in the given sets
+    """
+
+    #x_axis_time_lim_min, x_axis_time_lim_max = plot_box_dict["x_axis_lim"]
+    gp = None
+    colors = []
+    labels = []
+
+    for plot_set in sets.itervalues():
+        for plot in plot_set["plots"]:
+            colors.append(plot["color"])
+            labels.append(plot["stream_type"])
+            gp = ggplot_ecdf(conf, plot_box_dict, plot, gp)
+
+    if "y_axis_title" in plot_box_dict:
+        gp += r.ylab(plot_box_dict["y_axis_title"])
+    if "x_axis_title" in plot_box_dict:
+        gp += r.xlab(plot_box_dict["x_axis_title"])
+
+    gp += ggplot2.ggtitle(plot_box_dict["box_title"])
+    #gp += ggplot2.scale_x_continuous(breaks=IntVector(range(x_axis_time_lim_min, x_axis_time_lim_max +1, 5))
+    #                                 ,minor_breaks=IntVector(range(x_axis_time_lim_min, x_axis_time_lim_max +1, 5))
+    #                                 ,limits=IntVector((x_axis_time_lim_min, x_axis_time_lim_max))
+    #                                 )
+
+    # Make legend
+    gp += ggplot2.theme(
+        **{"legend.position": "right", #"axis.title.y": ggplot2.element_blank()
+           })
+
+    gp += ggplot2.scale_colour_identity(guide="legend", name="Stream types"
+                                        #, breaks=StrVector(("darkblue", "darkred"))
+                                        #, labels=StrVector(("Thin", "Greedy"))
+                                        , breaks=StrVector(colors)
+                                        , labels=StrVector(labels)
+                                        )
+    return gp
+
+
+
 def do_ggplots(conf, plot_groups):
     plot_list = plot_groups["plot_boxes"]
     n_rows = conf["plot_conf"]["n_rows"]
     n_columns = conf["plot_conf"]["n_columns"]
 
-    r.par(**{"cex.axis": 1.3}) # Size of axis labels (relative to default)
+    #r.par(**{"cex.axis": 1.3}) # Size of axis labels (relative to default)
     # This is the margins for all the plots
     #r.par(oma=IntVector(page_margins)) # bottom, left, top, right)
     # Rows and columns in the grid
-    r.par(mfrow=IntVector((n_rows, n_columns)))
+    #r.par(mfrow=IntVector((n_rows, n_columns)))
 
     plots_per_page = n_columns * n_rows
     plots_on_last_page = len(plot_list) % plots_per_page
         col_count = conf["plot_conf"]["n_columns"]
         row_count = conf["plot_conf"]["n_rows"]
 
-        if ((plots_per_page == index + 1) or (max_plots_per_page and plot_index_on_page == max_plots_per_page)):
+        if ((((index + 1) % plots_per_page) == 0) or (max_plots_per_page and plot_index_on_page == max_plots_per_page)):
             plot_index_on_page = 0
             make_plot_page(items_to_plot, row_count, col_count)
         else:
 import graph_default
 import graph
 from graph_r import *
+import glob
 
 def latency_file_parse_func(plot_conf, file_conf, pcap_file, basename, parse_func_arg):
     if graph_default.file_parse_generic(plot_conf, file_conf, pcap_file, basename, parse_func_arg) is False:
     files = []
     #t37-rdb-itt100:15-ps120-cccubic_vs_g10_kbit5000_min10_rtt150_loss_pif20_qlen30_delayfixed_num0_rdbsender-rdb-latency-all-10.0.0.12-22000-10.0.0.22-5000.dat
     d = os.path.join(file_conf["output_dir"], "%s%s" % (file_conf["prefix"], "latency-all-%(src_ip)s*" % file_conf))
-    #print "Getting files: ", d
-    import glob
-    files = glob.glob(d)
-    #print "files:", files
-
-    results_dict["latency_by_stream_files"] = files
+    results_dict["latency_by_stream_files"] = d
 
     #print "results_file:", results_dict["results_file"]
 
 
 def latency_results_parse(conf, box_conf, file_data, set_key):
     print "Reading latency file:", file_data["results"]["latency"]["results_file"]
-    dataframe = r["read.csv"](file_data["results"]["latency"]["results_file"], header=False)
-    dataframe.names[0] = "time"
-    dataframe.names[1] = "latency"
-    file_data["ecdf_ggplot2_values"] = dataframe
-    file_data["ecdf_values"] = [r["as.numeric"](dataframe[1])]
-    print "ecdf_values:", file_data["ecdf_values"]
-
-    data = []
-    for f in file_data["results"]["latency"]["latency_by_stream_files"]:
-        print "Reading latency file:", f
-        dataframe = r["read.csv"](f, header=False)
-        data.append((f, r["as.numeric"](dataframe[1])))
-    #file_data["ecdf_values"] = data
 
-def latency_box_key_func(box_conf, conf):
+    if not (box_conf["box_conf"].get("latency_options", None) and
+            box_conf["box_conf"]["latency_options"].get("per_stream", None) is True):
+        print "R1"
+        dataframe = r["read.csv"](file_data["results"]["latency"]["results_file"], header=False)
+        dataframe.names[0] = "time"
+        dataframe.names[1] = "latency"
+        file_data["ecdf_ggplot2_values"] = dataframe
+        file_data["ecdf_values"] = [r["as.numeric"](dataframe[1])]
+    else:
+        print "R2"
+        data = []
+        files = glob.glob(file_data["results"]["latency"]["latency_by_stream_files"])
 
+        for f in files:
+            print "Reading latency file:", f
+            dataframe = r["read.csv"](f, header=False)
+            data.append(r["as.numeric"](dataframe[1]))
+        file_data["ecdf_values"] = data
+
+def latency_box_key_func(box_conf, conf):
     if conf["hostname"] == "rdbsender":
         if conf["type_thin1"] == "tcp":
-            conf["color"] = "darkgoldenrod1"
+            conf["color"] = "darkblue"
         elif conf["type_thin1"] == "rdb":
             pif = "rdb_pif%d" % conf["packets_in_flight"]
             if pif in graph_default.color_map:
         if conf["type_thin1"] == "rdb":
             conf["color"] = "firebrick1"
 
+
 def thin_itt_mark_line_func(plot_conf, file_conf):
     line_start = 200
     for i in range(0, 7):
 
 def get_color_latency(plot):
     stream_id = plot["type_id"]
+    #print "streams_id:", plot["streams_id"]
+    #print "streams:", plot["streams"][plot["streams_id"]]
+
     pif_type = plot["type_id_pif"]
 
     if stream_id == "type_thick":
             return graph_default.color_map["thick"]
         return graph_default.color_map["thick2"]
 
+    print "Get_color_latency:", plot["hostname"]
+
     if plot["num"] == 0:
-        return graph_default.host_type_color_map["rdbsender"][pif_type]
+        return graph_default.host_type_color_map[plot["hostname"]][pif_type]
     elif plot["num"] == 1:
         return "cyan"
     else:
         return "darkgreen"
 
-def get_conf():
+def get_latency_conf():
     conf = deepcopy(graph_default.get_conf())
     conf["output_file"] = graph.args.latency_output_file
     conf["box_conf"] = { "key": "Streams:%(stream_count_thin1)s_Streams_Greedy:%(stream_count_thick)s_Payload:%(payload_thin1)s_ITT:%(itt_thin1)s_RTT:%(rtt)s_PIFG:%(group)s_%(cong_control_thin1)s_%(duration)s_%(thin_dupack)s_%(linear_timeout)s",
                          "sort_keys": ["thin_dupack", "linear_timeout", "stream_count_thin1", "stream_count_thick", "group"],
                          "func" : latency_box_key_func,
                          #"percentiles": [.5, .75, .9],
-                         #"box_title_def" : "%(stream_count_thin1)d vs %(stream_count_thick)d %(cong_control_thin1)s DA:%(thin_dupack)s LT:%(linear_timeout)s" }
+                         #"box_title_def" : "%(stream_count_thin1)d vs %(stream_count_thick)d %(cong_control_thin1)s " }
                          "box_title_def" : "Thin Dupack: %(thin_dupack)s   Thin Linear Timeout: %(linear_timeout)s" }
     conf["set_conf"] = { "key": "Streams:%(stream_count_thin1)s_Payload:%(payload_thin1)s_ITT:%(itt_thin1)s_RTT:%(rtt)s_PIFG:%(group)s" }
     conf["page_group_def"] = {#"title": "Payload: %(payload_thin1)s  RTT: %(rtt)sms  ITT: %(itt_thin1)sms  Queue Len: %(queue_len)s  Duration: %(duration)s min",
                               "plot_func": plot_ecdf_box,
                               #"plot_func": plot_ecdf_box_ggplot,
                               #"legend_colors": color_map
-                              "legend": { "color_func": get_color_latency, "values": {"type_thick": "Greedy", "type_thin1": "Thin"}},
+                              "legend": { "color_func": get_color_latency, "values": {"type_thick": "Greedy", "type_thin1": "Thin", "type_thin2": "Thin2", "type_thin3": "Thin3"}},
                               "box_commands": thin_itt_mark_line_func,
                               "r.plot_args": {"cex.main": 0.9 },
                               "r.mtext_page_title_args": {"cex.main": 0.9 },
     conf["file_parse"]["files_nomatch_regex"].append(".*qlen46875.*")
     #conf["plot_conf"].update({ "n_columns": 1, "n_rows" : 1})
     return conf
+
+get_conf = get_latency_conf

File plot_latency_thin_stream_mod_CDF_steps.py

+#!/usr/bin/env python
+from datetime import datetime
+import argparse
+import os
+
+from graph_base import main, parse_args
+
+from graph_default_bendik import get_default_conf
+
+import graph_default
+graph_default.get_conf = get_default_conf
+
+def get_latency_conf():
+    from latency import get_latency_conf as get_latency_conf_original
+    conf = get_latency_conf_original()
+    conf["box_conf"] = { "key": "Streams:%(stream_count_thin1)s_Streams_Greedy:%(stream_count_thick)s_Payload:%(payload_thin1)s_ITT:%(itt_thin1)s_RTT:%(rtt)s_PIFG:%(group)s_%(cong_control_thin1)s_%(duration)s_%(thin_dupack)s_%(linear_timeout)s",
+                         #"sort_key_func": lambda x: (x["stream_count_thin1"], x["stream_count_thick"], x["group"], x["itt_thin1"]),
+                         #"sort_keys": ["stream_count_thin1", "stream_count_thick", "group", "itt_thin1"],
+                         "sort_key_func": lambda x: (x["thin_dupack"], x["linear_timeout"], x["stream_count_thin1"], x["stream_count_thick"], x["group"]),
+                         "sort_keys": ["thin_dupack", "linear_timeout", "stream_count_thin1", "stream_count_thick", "group"],
+                         #"func" : latency.latency_box_key_func,
+                         "latency_options": {"per_stream": True},
+                         #"box_title_def" : "%(stream_count_thin1)d vs %(stream_count_thick)d %(cong_control_thin1)s " }
+                         "box_title_def" : "Thin Dupack: %(thin_dupack)s   Thin Linear Timeout: %(linear_timeout)s" }
+    conf["set_conf"] = { "key": "Streams:%(stream_count_thin1)s_Payload:%(payload_thin1)s_ITT:%(itt_thin1)s_RTT:%(rtt)s_PIFG:%(group)s" }
+    conf["page_group_def"] = {
+        "title": "%(stream_count_thin1)d Thin streams vs %(stream_count_thick)d Greedy streams - Duration: %(duration)s min\n"
+        "Payload thin streams: %(payload_thin1)s   RTT: %(rtt)sms   ITT: %(itt_thin1)sms   Queue Len: %(queue_len)s packets, Cong: %(cong_control_thin1)s",
+        "sort_keys": ["payload_thin1", "rtt", "stream_count_thin1"], "sort_key_func": lambda x: (x[0], x[1], x[2])}
+    conf["plot_conf"].update({"n_columns": 2, "n_rows" : 2, "x_axis_lim" : [150, 1200], "y_axis_lim" : [0.5, 1],
+                              "y_axis_title" : "Percentage of ACK Latencies",
+                              "x_axis_title" : {
+                "title": ["ACK Latency in milliseconds"],
+                },
+                              "r.plot_args": {"cex.main": 0.9 },
+                              "r.mtext_page_title_args": {"cex.main": 1.2 },
+                              }
+                             )
+    conf["paper"] = "special"
+    conf["paper_width"] = 10
+    conf["paper_height"] = 8
+    return conf
+
+
+import latency
+latency.get_conf = get_latency_conf
+
+if __name__ == "__main__":
+    import sys
+    sys.argv.extend(["-l", "RDBSENDER_ZSENDER_TEST_THIN_STREAM_MOD_latency_itt100_qlen30_CDF.pdf",
+                     "-od", "RDBSENDER_ZSENDER_TEST_THIN_STREAM_MODLAT",
+                     "-frm", "t21.*itt100-.*cubic.*vs_g10.*min5.*qlen30.*",
+                     "/root/bendiko/pcap/rdbsender_zsender_to_zreceiver_real/RDBSENDER_ZSENDER_TEST_THIN_STREAM_MOD/all_results/"])
+    parse_args()
+    main()

File plot_throughput_rdbmisuse.py

+#!/usr/bin/env python
+from datetime import datetime
+import argparse
+import os
+from graph_base import main, parse_args
+from graph_default_bendik import get_default_conf
+import graph_default
+graph_default.get_conf = get_default_conf
+
+#./bendik_graph_base.py -vvv  -d ../pcap/rdbsender_zsender_to_zreceiver_real/RDBSENDER_ZSENDER_TEST_THIN_RDBMISUSE_NvsN/all_results/ -od RDBSENDER_ZSENDER_TEST_THIN_RDBMISUSE_NvsN -t RDBSENDER_ZSENDER_TEST_THIN_RDBMISUSE_NvsN_throughput2.pdf -frm "t6.*.*itt10-ps400.*num1.*"
+
+def get_throughput_conf():
+    from throughput import get_throughput_conf as get_throughput_conf_original
+    conf = get_throughput_conf_original()
+    conf["document_info"]["show"] = False
+    conf["paper"] = "special"
+    conf["paper_width"] = 6
+    conf["paper_height"] = 5
+    conf["plot_conf"].update({"n_columns": 1, "n_rows": 1})
+    conf["box_conf"]["box_title_def"] = "%(stream_count_thin1)d Greedy streams vs %(stream_count_thick)d misusers"
+    conf["plot_conf"].update({"x_axis_title": {"title": ["RDB packets in flight limit", "Greedy", "misuser", "misuser with RDB"], "adj": [0, .45, .6, .9],
+                                               "colors" : ["black", "darkred", "darkgoldenrod1", "darkblue"] },
+                              })
+
+
+    #conf["plot_conf"]["y_axis_title"] = "Throughput "
+    #conf["file_parse"]["files_match_exp"] = "t6.*.*itt10-ps400.*num1.*"
+    return conf
+
+import throughput
+throughput.get_conf = get_throughput_conf
+
+if __name__ == "__main__":
+    import sys
+    sys.argv.extend(["-t", "DBSENDER_ZSENDER_TEST_THIN_RDBMISUSE_NvsN_throughput2.pdf",
+                     "-od", "RDBSENDER_ZSENDER_TEST_THIN_RDBMISUSE_NvsN",
+                     "-d", "/root/bendiko/pcap/rdbsender_zsender_to_zreceiver_real/RDBSENDER_ZSENDER_TEST_THIN_RDBMISUSE_NvsN/all_results/",
+                     "-frm", "t6.*.*itt10-ps400.*num1.*"])
+    parse_args()
+    main()
     #print "Type:", file_data["type"]
     #print "type_id:", file_data["type_id"]
     lines = f.readlines()
+    saved_log = None
+
+    def add_log(log):
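+        # Append one parsed tcp_probe sample to the per-stream result list
+        # and to the flat data-frame columns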
+        res[log.key] += [(log.s_cwnd, log.s_ssthresh, log.s_type)] # snd_cwnd, ssthresh
+        stream_col.append(log.key)
+        time_col.append(log.s_time)
+        cwnd_col.append(log.s_cwnd)
+        type_col.append(log.s_type)
+        type_id_col.append(log.s_type_id)
+
+
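+    # Record type holding the fields parsed from one tcp_probe line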
+    class Log(object):
+        pass
 
     for i, line in enumerate(lines):
         #xprint "Line:", line
         l = line.split()
-        key = l[1] + "-" + l[2]
-        s_type = l[1].split(":")[0]
-        s_type = file_data["type"]
-        s_type_id = file_data["type_id"]
-        s_time = l[0]
-        s_cwnd = l[6]
-        s_ssthresh = l[7]
-
-        if only_changes and res[key]:
+        log = Log()
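+        # tcp_probe fields: l[0]=time, l[1]=src, l[2]=dst, l[6]=snd_cwnd, l[7]=ssthresh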
+        log.key = l[1] + "-" + l[2]
+        log.s_type = l[1].split(":")[0]
+        log.s_type = file_data["type"]
+        log.s_type_id = file_data["type_id"]
+        log.s_time = l[0]
+        log.s_cwnd = l[6]
+        log.s_ssthresh = l[7]
+
+        if only_changes and res[log.key]:
             # If this is not the last value
             if i != (len(lines) -1):
                 # Check the cwnd value of the last value
-                if res[key][-1][0] == s_cwnd:
+                if res[log.key][-1][0] == log.s_cwnd:
                     # Same value, so ignore
-                        continue
-                        #pass
+                    saved_log = log
+                    continue
+                    #pass
             else:
-                print "USE THE LAST VALUE: s_time: %s, cwnd: %s" % (s_time, s_cwnd)
-        res[key] += [(s_cwnd, s_ssthresh, s_type)] # snd_cwnd, ssthresh
-        stream_col.append(key)
-        time_col.append(s_time)
-        cwnd_col.append(s_cwnd)
-        type_col.append(s_type)
-        type_id_col.append(s_type_id)
+                print "USE THE LAST VALUE: s_time: %s, cwnd: %s" % (log.s_time, log.s_cwnd)
+
+        # A changed value supersedes any pending skipped duplicate
+        saved_log = None
+        add_log(log)
 
         #if i == 10000:
         #    break
 
+    # If the trace ended on skipped duplicate values, flush the last one so
+    # the final timestamp is still included
+    if saved_log:
+        add_log(saved_log)
+
+
     #print "RES:", len(res)
     #for k in res:
     #    print "%s : %d" % (k, len(res[k]))
 def tcp_probe_results_parse(conf, box_conf, file_data, set_key):
     #print "tcp_probe_results_parse:", file_data["hostname"]
     only_changes = True
-    if file_data["hostname"] == "rdbsender":
+    if file_data["hostname"] != "zsender":
         only_changes = True
     data = read_tcp_probe_output(file_data, only_changes=only_changes)
     file_data["data"] = data
+    #print "DATA:", data
     #print "tcp_probe_data_file:", file_data["tcp_probe_data_file"]
     box_conf["span"] = 0.1
 
         ("Bytes Loss", "Bytes Loss", "(?P<value>\d+\.\d+).*"),
         ]
     data, data_dict = util.read_output_data(keys, file_data["results"]["analysetcp"]["stdout_file"])
-    #print "data_dict:", data_dict
+
     file_data["analysetcp_data"] = data_dict
 
+    box_conf["table_data_keys"] = OrderedDict([("Name",        {"key": "name",  "type": "str", "location": "table_data"}),
+                                               ("Type",        {"key": "type",  "type": "str"}),
+                                               ("Streams",     {"key": "stream_count",  "type": "int"}),
+                                               ("Cong",        {"key": "cong_control", "type": "str"}),
+                                               ("PIF Lim",     {"key": "packets_in_flight",  "type": "int"}),
+                                               ("Data points", {"key": "data_points", "type": "int", "location": "table_data"}),
+                                               ("ITT",         {"key": "itt", "type": "str"}),
+                                               ("Payload",     {"key": "payload", "type": "int"}),
+                                           ])
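+    # Entries with "location": "table_data" are filled directly while plotting
+    # (see do_stream_custom_data_and_color); the rest come from the parsed
+    # stream properties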
 
 def tcp_probe_file_parse_func(plot_conf, file_conf, pcap_file, basename, parse_func_arg):
     #print "tcp_probe_file_parse_func"
     file_conf["results"]["analysetcp"]["stdout_file"] = analysetcp_stdout_file
     file_conf["results"]["analysetcp"]["results_file"] = analysetcp_stdout_file
     file_conf["results"]["analysetcp"]["results_cmd"] =\
-        "./analyseTCP -s %(src_ip)s -r %(dst_ip)s -p %(dst_port)s -f %(pcap_file)s -g %(pcap_file_receiver)s -o %(output_dir)s 1> %(stdout_file)s" %\
+        "analyseTCP -s %(src_ip)s -r %(dst_ip)s -p %(dst_port)s -f %(pcap_file)s -g %(pcap_file_receiver)s -o %(output_dir)s 1> %(stdout_file)s" %\
         dict(file_conf, **file_conf["results"]["analysetcp"])
 
     file_conf["color"] = "darkred" # Default color for dataset
 
+    stream_type_to_ggplot_properties = {"r": {"color_code": 'c(brewer.pal(9, "Blues")[3:8])', "legend_name": "Thin"},
+                                        "y": {"color_code": 'c(brewer.pal(9, "Greens")[3:8])', "legend_name": "Thin2"},
+                                        "z": {"color_code": 'c(brewer.pal(9, "Reds")[3:8])', "legend_name": "Thick"}}
+
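+    # Attach the r/y/z palette and legend name to this file's stream group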
+    stream_properties = file_conf["streams"][file_conf["streams_id"]]
+    stream_properties["color_code"] = stream_type_to_ggplot_properties[file_conf["streams_id"]]["color_code"]
+    stream_properties["legend_name"] = stream_type_to_ggplot_properties[file_conf["streams_id"]]["legend_name"]
+
 
 def get_conf():
     conf = graph_default.get_conf()
     #                          "sort_keys": ["payload_thin1", "itt_thin1", "rtt"], "sort_key_func": lambda x: (x[0], x[1], x[2])}
     conf["plot_conf"].update({ "n_columns": 1, "n_rows" : 1,
                                #"x_axis_lim" : [27, 60],
-                               "x_axis_lim" : [0, 300],
-                               "y_axis_lim" : [0, 50],
+                               "x_axis_lim" : [10, 600],
+                               "y_axis_lim" : [0, 45],
                                "x_axis_font_size": 0.7,
+                               "x_axis_breaks": 1,
+                               "x_axis_minor_breaks": .5,
+                               "y_axis_breaks": 10,
+                               "y_axis_minor_breaks": 5,
+                               "theme_args": {"legend.position": "left"},
+
                                #"y_axis_title" : "Congestion window size",
                                #"x_axis_title" : { "title": ["Stream type", "THICK", "THIN", "RDB:PIF:3", "RDB:PIF:6", "RDB:PIF:10", "RDB:PIF:20"],
                                #                   "adj": [0, .3, .4, .5, .65, .75, .9],
     conf["parse_results_func"] = tcp_probe_results_parse
     conf["file_parse"].update({ "func": tcp_probe_file_parse_func })
     conf["document_info"]["title"] = "TCP CONGESTION WINDOW Plots"
+    conf["document_info"]["show"] = False
     conf["paper"] = "special"
     conf["paper_width"] = 11
-    conf["paper_height"] = 6
+    conf["paper_width"] = 200
+    conf["paper_height"] = 8
     conf["print_page_title"] = False
 
     #conf["file_parse"]["files_match_regex"].append("t9-rdb-itt100:15-ps120_vs_g5_kbit5000_min10_rtt150_loss_pif20_qlen46875_delayfixed_num0_rdbsender.*")

File throughput.py

 import graph_default
 from graph_r import *
 
+host_to_color_map = { 'rdbsender': "darkblue",
+                      'zsender': "red",
+                      'ysender': "darkgoldenrod1"}
+
 def throughput_box_key_func(box_conf, conf):
+    conf["color"] = host_to_color_map[conf["hostname"]]
+
     if conf["hostname"] == "rdbsender":
         if conf["type"] == "tcp":
-            conf["color"] = "darkgoldenrod1"
+            conf["color"] = "blue"
         elif conf["type"] == "rdb":
             conf["color"] = "darkblue"
 
+    # Save the pif in a list to later get the correct column in the plot
+    order = box_conf.get("order", [])
+    order.append(int(conf["packets_in_flight"]))
+    box_conf["order"] = list(set(order))
 
 def goodput_file_parse_func(plot_conf, file_conf, pcap_file, basename, parse_func_arg):
     if graph_default.file_parse_generic(plot_conf, file_conf, pcap_file, basename, parse_func_arg) is False:
     #print "y_axis_range:", plot_conf["plot_conf"]["y_axis_range"]
     #conf["plot_conf"]["y_axis_lim"]
 
+    if "set_pif_order" in plot_conf["set_conf"]:
+        def get_col(plot):
+            # Column = 1-based rank of this file's PIF among the PIFs seen in the box
+            for i, pif in enumerate(sorted(plot["box_conf"]["order"])):
+                if int(file_conf["packets_in_flight"]) == pif:
+                    return i + 1
+            return 0
+        file_conf["column"] = get_col
+
 
-def get_conf():
+def get_throughput_conf():
     conf = deepcopy(graph_default.get_conf())
-    conf["output_file"] = graph_default.args.throughput_output_file
+    conf["output_file"] = graph.args.throughput_output_file
     conf["box_conf"] = { "key": "Streams:%(stream_count_thin1)s_Duration:%(duration)s_Payload:%(payload_thin1)s_ITT:%(itt_thin1)s_RTT:%(rtt)s_%(thin_dupack)s",
                          "sort_key_func": lambda x: (x["stream_count_thin1"]),
                          "sort_keys": ["stream_count_thin1"],
                          "func" : throughput_box_key_func,
                          "box_title_def": "%(stream_count_thin1)d vs %(stream_count_thick)d DA:%(thin_dupack)s" }
-    conf["set_conf"] = { "key": "Streams:%(stream_count_thin1)s_Payload:%(payload_thin1)s_ITT:%(itt_thin1)s_RTT:%(rtt)s_PIF:%(packets_in_flight)s_Type:%(type_thin1)s"}
+    conf["set_conf"] = { "key": "Streams:%(stream_count_thin1)s_Payload:%(payload_thin1)s_ITT:%(itt_thin1)s_RTT:%(rtt)s_PIF:%(packets_in_flight)s_Type:%(type_thin1)s",
+                         "set_pif_order": {0: 1, 4: 2, 20: 3} }
     conf["page_group_def"] = { "title": "Duration: %(duration)smin, Payload: %(payload_thin1)s, ITT: %(itt_thin1)sms, RTT: %(rtt)s, Cap: %(cap)skbit",
                                           "sort_keys": ["duration", "payload_thin1", "itt_thin1", "rtt"], "sort_key_func": lambda x: (x[0], x[1], x[2], x[3])}
     conf["plot_conf"].update({ "n_columns": 2, "n_rows" : 2, "x_axis_lim" : [0, 5000], "y_axis_lim" : [0, 10000],
                                                                                                                            file_conf["stream_count_thick"])}}
     conf["sample_sec"] = 0.1
     conf["plot_conf"]["bandwidth_axis_scale"] = 1.1 # Multiplied with bandwidth to get y axis limit
+    #conf["plot_conf"]["bandwidth_axis_scale"] = 0.7
     #conf["file_parse"]["files_match_exp"] = "8_thin_*_vs_8*%s" % conf["file_parse"]["pcap_suffix"]
+    conf["plot_conf"].update({"n_columns": 1, "n_rows" : 1})
+
     return conf
 
 def get_goodput_conf():
     return goodput_conf
 
 def setup_default_goodput_conf(conf):
-    conf["output_file"] = graph_default.args.goodput_output_file
+    conf["output_file"] = graph.args.goodput_output_file
     conf["document_info"]["title"] = "Goodput plots"
     conf["file_parse"]["func"] = goodput_file_parse_func
     conf["plot_conf"]["y_axis_title"] = "Goodput (Kbit/second aggregated over %(sample_sec)d seconds)" % conf
+
+get_conf = get_throughput_conf
         return text
 
 
+def frange(*args):
+    """Like range(), but falls back to numpy.arange() for float arguments,
+    e.g. frange(0, 1, 0.25) -> [0.0, 0.25, 0.5, 0.75]"""
+    try:
+        return range(*args)
+    except TypeError:
+        from numpy import arange
+        return arange(*args).tolist()
+
 def read_output_data2(conf, plot_box_dict, sets, keys, filename):
     data = []
     colnames = []