Commits

Carsten Senger committed 5293a41

A little bit of PEP 8

Comments (0)

Files changed (2)

 import benchmarks
 import socket
 
+
 def perform_upload(pypy_c_path, args, force_host, options, res, revision,
                    changed=True, postfix='', branch='default'):
     from saveresults import save
         host = force_host
     else:
         host = socket.gethostname()
-    print save(project, revision, res, options, name, host, changed=changed, branch=branch)
+    print save(project, revision, res, options, name, host, changed=changed,
+               branch=branch)
 
-        
+
 def run_and_store(benchmark_set, result_filename, pypy_c_path, revision=0,
                   options='', branch='default', args='', upload=False,
                   force_host=None, fast=False, baseline=sys.executable,
     res = [(name, result.__class__.__name__, result.__dict__)
            for name, result in results]
     f.write(json.dumps({
-        'revision' : revision,
-        'results' : res,
-        'options' : options,
-        'branch'  : branch,
+        'revision': revision,
+        'results': res,
+        'options': options,
+        'branch': branch,
         }))
     f.close()
     if upload:
             argsbase, argschanged = args, args
         if 'pypy' in baseline:
             perform_upload(pypy_c_path, argsbase, force_host, options, res,
-                           revision, changed=False, postfix=postfix, branch=branch)
+                           revision, changed=False, postfix=postfix,
+                           branch=branch)
         perform_upload(pypy_c_path, argschanged, force_host, options, res,
                        revision, changed=True, postfix=postfix, branch=branch)
 
+
 BENCHMARK_SET = ['richards', 'slowspitfire', 'django', 'spambayes',
                  'rietveld', 'html5lib', 'ai']
 BENCHMARK_SET += perf._FindAllBenchmarks(benchmarks.__dict__).keys()
 
+
 class WrongBenchmark(Exception):
     pass
 
+
 def main(argv):
     import optparse
     parser = optparse.OptionParser(
                             " If there is a comma in this option's value, the"
                             " arguments before the comma (interpreted as a"
                             " space-separated list) are passed to the baseline"
-                            " python, and the arguments after are passed to the"
-                            " changed python. If there's no comma, the same"
-                            " options are passed to both."))
+                            " python, and the arguments after are passed to"
+                            " the changed python. If there's no comma, the"
+                            " same options are passed to both."))
     parser.add_option("--upload", default=False, action="store_true",
                       help="Upload results to speed.pypy.org")
     parser.add_option("--force-host", default=None, action="store",
     # use 'default' if the branch is empty
     if not options.branch:
         options.branch = 'default'
-    
+
     benchmarks = options.benchmarks.split(',')
     for benchmark in benchmarks:
         if benchmark not in BENCHMARK_SET:
 
 Example usage:
 
-  $ ./saveresults.py result.json -r '45757:fabe4fc0dc08' -n pypy-c-jit -H tannit
-  
+  $ ./saveresults.py result.json -r '45757:fabe4fc0dc08' -n pypy-c-jit \
+    -H tannit
+
   OR
-  
-  $ ./saveresults.py result.json -r '45757:fabe4fc0dc08' -n pypy-c-jit-64 -H tannit
 
+  $ ./saveresults.py result.json -r '45757:fabe4fc0dc08' -n pypy-c-jit-64 \
+    -H tannit
 """
 
-import sys
-import urllib, urllib2, time
 from datetime import datetime
 import optparse
+import sys
+import time
+import urllib
+import urllib2
+
 
 SPEEDURL = "http://speed.pypy.org/"
 
+
 def save(project, revision, results, options, interpreter, host, testing=False,
          changed=True, branch='default'):
     testparams = []
     #Parse data
     data = {}
     error = 0
-        
+
     for b in results:
         bench_name = b[0]
         res_type = b[1]
                 data['std_dev'] = results['std_changed']
             else:
                 data['std_dev'] = results['std_base']
-        if testing: testparams.append(data)
-        else: error |= send(data)
+        if testing:
+            testparams.append(data)
+        else:
+            error |= send(data)
+
     if error:
         raise IOError("Saving failed.  See messages above.")
-    if testing: return testparams
-    else: return 0
-    
+    if testing:
+        return testparams
+    else:
+        return 0
+
+
 def send(data):
     #save results
     params = urllib.urlencode(data)
     f = None
     response = "None"
-    info = str(datetime.today()) + ": Saving result for " + data['executable'] + " revision "
-    info += str(data['commitid']) + ", benchmark " + data['benchmark']
+    info = ("%s: Saving result for %s revision %s, benchmark %s" %
+            (str(datetime.today()), data['executable'],
+             str(data['commitid']), data['benchmark']))
     print(info)
     try:
         retries = [1, 2, 3, 6]
 
 if __name__ == '__main__':
     parser = optparse.OptionParser(usage="%prog result.json [options]")
-    parser.add_option('-r', '--revision', dest='revision', default=None, type=str)
+    parser.add_option('-r', '--revision', dest='revision',
+                      default=None, type=str)
     parser.add_option('-n', '--name', dest='name', default=None, type=str)
     parser.add_option('-H', '--host', dest='host', default=None, type=str)
     parser.add_option('-b', '--baseline', dest='changed', default=True,
     parser.format_description = lambda fmt: __doc__
     parser.description = __doc__
     options, args = parser.parse_args()
-    if options.revision is None or options.name is None or options.host is None or \
-            len(args) != 1:
+    if (options.revision is None or options.name is None or
+        options.host is None or len(args) != 1):
         parser.print_help()
         sys.exit(2)
     main(args[0], options)