Bryan O'Sullivan committed ddf1543

Combination of build fix and WIP :-(

Comments (0)

Files changed (8)

 module Main (main) where
 
 import Control.Applicative ((<$>))
+import Control.Concurrent.MVar
 import Control.DeepSeq (rnf)
 import Control.Exception (bracket, catch, evaluate, finally)
 import Control.Monad (forM_, unless)
 import Data.Aeson ((.=), encode, object)
 import Data.Char (toLower)
+import Data.Conduit (ResourceT)
 import Data.Maybe (catMaybes)
 import Data.Text (Text, pack)
 import Data.Text.Encoding (encodeUtf8)
                 &= summary ("Pronk " ++ pronk_version ++
                             " - a modern HTTP load tester")
 
-fromArgs :: Args -> E.Request IO -> LoadTest.Config
+fromArgs :: Args -> E.Request (ResourceT IO) -> LoadTest.Config
 fromArgs Args{..} req =
     LoadTest.Config {
       LoadTest.concurrency = concurrency
   as@Args{..} <- cmdArgs $ defaultArgs &= program "pronk"
   validateArgs as
   cfg <- fromArgs as <$> createRequest as
-  run <- timed "tested" $ LoadTest.run cfg
+  (run,time) <- timed "tested" $ LoadTest.run cfg
   case run of
     Left [NetworkError err] -> fatal (show err)
     Left errs -> do
       exitWith (ExitFailure 1)
     Right results -> do
       whenNormal $ T.print "analysing results\n" ()
-      analysis <- timed "analysed" $ do
-                    r <- if bootstrap
-                         then Right <$> analyseFull results
-                         else return . Left . analyseBasic $ results
-                    evaluate $ rnf r
-                    return r
+      (analysis,_) <- timed "analysed" $ do
+                      r <- if bootstrap
+                           then Right <$> analyseFull results time
+                           else return . Left $ analyseBasic results time
+                      evaluate $ rnf r
+                      return r
       env <- environment
       let dump = object [ "config" .= cfg
                         , "environment" .= env
       maybeWriteFile json $ \h -> BL.hPut h (BL.append (encode dump) "\n")
       maybeWriteFile dump_events $ \h ->
           TL.hPutStr h . toLazyText . csvEvents $ results
-      maybeWriteFile output $ \h -> either (writeReport template h)
-                                           (writeReport template h) analysis
+      maybeWriteFile output $ \h -> either (writeReport template h time)
+                                           (writeReport template h time) analysis
       whenNormal $ do
         reportEvents stdout results
         either (reportBasic stdout) (reportFull whenLoud stdout)
   forM_ problems $ hPutStrLn stderr . ("Error: " ++)
   unless (null problems) $ exitWith (ExitFailure 1)
 
-createRequest :: Args -> IO (E.Request IO)
+createRequest :: Args -> IO (E.Request (ResourceT IO))
 createRequest Args{..} = do
   req0 <- E.parseUrl url `catch` \(e::E.HttpException) ->
           fatal $ "could not parse URL - " ++
       hPutStrLn stderr "Error: --literal and --from-file are mutually exclusive"
       exitWith (ExitFailure 1)
 
-timed :: Text -> IO a -> IO a
+timed :: Text -> IO a -> IO (a,Double)
 timed desc act = do
+  t <- newEmptyMVar
   startCPU <- getCPUTime
   startWall <- getPOSIXTime
-  act `finally` do
+  ret <- act `finally` do
     endCPU <- getCPUTime
     endWall <- getPOSIXTime
     let elapsedCPU  = fromIntegral (endCPU - startCPU) / 1e12
                 T.fixed 1 $ 100 * elapsedCPU / elapsedWall)
       else T.print "{} in {}\n"
                (desc, buildTime 4 elapsedWall)
+    putMVar t elapsedWall
+  ((,) ret) <$> takeMVar t
 
 fatal :: String -> IO a
 fatal e = do

lib/Network/HTTP/LoadTest.hs

 
 client :: Config -> Manager -> POSIXTime
        -> ResourceT IO (V.Vector Summary)
-client Config{..} mgr interval = loop 0 [] =<< liftIO getPOSIXTime
+client Config{..} mgr interval = loop 0 []
   where
-    loop !n acc now
+    loop !n acc
         | n == numRequests = return (V.fromList acc)
         | otherwise = do
+      now <- liftIO getPOSIXTime
       !evt <- timedRequest
       now' <- liftIO getPOSIXTime
       let elapsed = now' - now
           !s = Summary {
                  summEvent = evt
                , summElapsed = realToFrac elapsed
-               , summStart = realToFrac now'
+               , summStart = realToFrac now
                }
       when (elapsed < interval) $
         liftIO . threadDelay . truncate $ (interval - elapsed) * 1000000
-      loop (n+1) (s:acc) =<< liftIO getPOSIXTime
+      loop (n+1) (s:acc)
+    issueRequest :: ResourceT IO (Response L.ByteString)
     issueRequest = httpLbs (fromReq request) mgr
                    `catch` (throwIO . NetworkError)
+    timedRequest :: ResourceT IO Event
     timedRequest
       | timeout == 0 = respEvent <$> issueRequest
       | otherwise    = do
 respEvent :: Response L.ByteString -> Event
 respEvent resp =
     HttpResponse {
-      respCode = H.statusCode $ statusCode resp
+      respCode = H.statusCode $ responseStatus resp
     , respContentLength = fromIntegral . L.length . responseBody $ resp
     }

lib/Network/HTTP/LoadTest/Analysis.hs

     , analyseFull
     ) where
 
-import Criterion.Analysis (SampleAnalysis, analyseSample, scale)
-import Network.HTTP.LoadTest.Types (Analysis(..), Basic(..), Summary(..),
-                                    summEnd)
+import Criterion.Analysis (SampleAnalysis, analyseSample)
+import Network.HTTP.LoadTest.Types (Analysis(..), Basic(..), Summary(..))
 import Prelude hiding (catch)
 import Statistics.Quantile (weightedAvg)
 import qualified Data.Vector as V
 import qualified Data.Vector.Generic as G
-import qualified Data.Vector.Unboxed as U
 import qualified Statistics.Sample as S
 
-analyseFull :: V.Vector Summary -> IO (Analysis SampleAnalysis)
-analyseFull sumv = do
-  let start = summStart . G.head $ sumv
-      end = summEnd . G.last $ sumv
-      elapsed = end - start
-      timeSlice = min elapsed 1 / 200
-      slices = U.unfoldrN (round (elapsed / timeSlice)) go (sumv,1)
-        where go (v,i) = let (a,b) = G.span (\s -> summStart s <= t) v
-                             t = start + (i * timeSlice)
-                         in Just (fromIntegral $ G.length a,(b,i+1))
-      ci = 0.95
+analyseFull :: V.Vector Summary -> Double -> IO (Analysis SampleAnalysis)
+analyseFull sumv elapsed = do
+  let ci = 0.95
       resamples = 10 * 1000
   l <- analyseSample ci (G.convert . G.map summElapsed $ sumv) resamples
-  t <- analyseSample ci slices resamples
   return Analysis {
                  latency = l
                , latency99 = weightedAvg 99 100 . G.map summElapsed $ sumv
                , latency999 = weightedAvg 999 1000 . G.map summElapsed $ sumv
                , latValues = sumv
-               , throughput = scale (recip timeSlice) t
-               , throughput10 = (/ timeSlice) . weightedAvg 10 100 $ slices
-               , thrValues = slices
+               , throughput = fromIntegral (G.length sumv) / elapsed
     }
 
-analyseBasic :: V.Vector Summary -> Analysis Basic
-analyseBasic sumv = Analysis {
+analyseBasic :: V.Vector Summary -> Double -> Analysis Basic
+analyseBasic sumv elapsed = Analysis {
                       latency = Basic {
                                   mean = S.mean . G.map summElapsed $ sumv
                                 , stdDev = S.stdDev . G.map summElapsed $ sumv
                     , latency99 = weightedAvg 99 100 . G.map summElapsed $ sumv
                     , latency999 = weightedAvg 999 1000 . G.map summElapsed $ sumv
                     , latValues = sumv
-                    , throughput = Basic {
-                                     mean = S.mean slices / timeSlice
-                                   , stdDev = S.stdDev slices / timeSlice
-                                   }
-                    , throughput10 = (/ timeSlice) . weightedAvg 10 100 $ slices
-                    , thrValues = slices
+                    , throughput = fromIntegral (G.length sumv) / elapsed
                     }
- where start = summStart . G.head $ sumv
-       end = summEnd . G.last $ sumv
-       elapsed = end - start
-       timeSlice = min elapsed 1 / 200
-       slices = U.unfoldrN (round (elapsed / timeSlice)) go (sumv,1)
-         where go (v,i) = let (a,b) = G.span (\s -> summStart s <= t) v
-                              t = start + (i * timeSlice)
-                          in Just (fromIntegral $ G.length a,(b,i+1))

lib/Network/HTTP/LoadTest/Report.hs

-{-# LANGUAGE OverloadedStrings, RecordWildCards, RelaxedPolyRec #-}
+{-# LANGUAGE BangPatterns, OverloadedStrings, RecordWildCards, RelaxedPolyRec, ViewPatterns #-}
 
 module Network.HTTP.LoadTest.Report
     (
 import Criterion.Analysis (SampleAnalysis(..), OutlierEffect(..),
                            OutlierVariance(..))
 import Data.Data (Data)
-import Data.List (sort)
+import Data.Function (on)
+import Data.Maybe (fromMaybe)
 import Data.Monoid (mappend, mconcat, mempty)
 import Data.Text (Text)
 import Data.Text.Buildable (build)
 import Data.Text.Lazy.Builder (Builder)
 import Data.Vector (Vector)
 import Network.HTTP.LoadTest.Types (Analysis(..), Basic(..), Event(..),
-                                    Summary(..))
+                                    Summary(..), summEnd)
 import Paths_pronk (getDataFileName)
 import Prelude hiding (print)
+import Statistics.Function (sort)
 import Statistics.Resampling.Bootstrap (Estimate(..))
 import Statistics.Sample.KernelDensity (kde)
 import System.IO (Handle)
 import qualified Criterion.Report as R
 import qualified Data.ByteString.Lazy as L
 import qualified Data.HashMap.Strict as H
+import qualified Data.List as List
+import qualified Data.MeldableHeap as Q
 import qualified Data.Text.Format as T
 import qualified Data.Vector.Generic as G
+import qualified Data.Vector.Unboxed as U
+import qualified Data.Vector as V
 import qualified Text.Hastache as H
 
 reportBasic :: Handle -> Analysis Basic -> IO ()
   print "    mean:    {}\n" [time (mean latency)]
   print "    std dev: {}\n" [time (stdDev latency)]
   print "    99%:     {}\n    99.9%:   {}\n" (time latency99, time latency999)
-  print "\nthroughput:\n" ()
-  print "    mean:    {} req/sec\n" [mean throughput]
-  print "    std dev: {} req/sec\n" [stdDev throughput]
-  print "    10%:     {} req/sec\n" [throughput10]
+  print "\nthroughput:  {}\n" [rate throughput]
 
 reportFull :: (IO () -> IO ()) -> Handle -> Analysis SampleAnalysis -> IO ()
 reportFull whenLoud h Analysis{..} = do
     print "      upper: {}\n" [time (estUpperBound (anStdDev latency))]
   effect h (anOutlierVar latency)
   print "    99%:     {}\n    99.9%:   {}\n" (time latency99, time latency999)
-  print "\nthroughput:\n" ()
-  print "    mean:    {} req/sec\n" [estPoint (anMean throughput)]
-  whenLoud $ do
-    print "      lower: {}\n" [rate (estLowerBound (anMean throughput))]
-    print "      upper: {}\n" [rate (estUpperBound (anMean throughput))]
-  print "    std dev: {}\n" [rate (estPoint (anStdDev throughput))]
-  whenLoud $ do
-    print "      lower: {}\n" [rate (estLowerBound (anStdDev throughput))]
-    print "      upper: {}\n" [rate (estUpperBound (anStdDev throughput))]
-  effect h (anOutlierVar throughput)
-  print "    10%:     {}\n" [rate throughput10]
+  print "\nthroughput:  {}\n" [rate throughput]
 
 time :: Double -> Builder
 time = buildTime 4
       classify Timeout          = 0
       classify HttpResponse{..} = respCode
   T.hprint h "responses:\n" ()
-  forM_ (sort . H.toList $ evtMap) $ \(e,n) -> do
+  forM_ (List.sort . H.toList $ evtMap) $ \(e,n) -> do
     let nameOf 0 = "timeout "
         nameOf k = "HTTP " `mappend` build k
     T.hprint h "    {} {}\n" (nameOf e, T.left 7 ' ' n)
 templateDir = unsafePerformIO $ getDataFileName "templates"
 {-# NOINLINE templateDir #-}
 
-writeReport :: (Data a) => FilePath -> Handle -> Analysis a -> IO ()
-writeReport template h a@Analysis{..} = do
+writeReport :: (Data a) => FilePath -> Handle -> Double -> Analysis a -> IO ()
+writeReport template h elapsed a@Analysis{..} = do
   let context "include" = MuLambdaM $
                           R.includeFile [templateDir, R.templateDir]
+      context "elapsed"   = MuVariable elapsed
       context "latKdeTimes" = R.vector "x" latKdeTimes
       context "latKdePDF" = R.vector "x" latKdePDF
       context "latKde"    = R.vector2 "time" "pdf" latKdeTimes latKdePDF
       context "latValues" = MuList . map mkGenericContext . G.toList $ lats
+      context "thrTimes" = R.vector "x" thrTimes
       context "thrValues" = R.vector "x" thrValues
+      context "concTimes" = R.vector "x" . U.fromList $ map fstS conc
+      context "concValues" = R.vector "x" . U.fromList $ map sndS conc
       context n = mkGenericContext a n
       (latKdeTimes,latKdePDF) = kde 128 . G.convert . G.map summElapsed $ latValues
       lats = G.map (\s -> s { summStart = summStart s - t }) latValues
-          where t = G.minimum . G.map summStart $ latValues
+          where t = summStart . G.head $ latValues
+      (thrTimes,thrValues) = graphThroughput (min (G.length latValues) 50) elapsed latValues
+      conc = graphConcurrency lats
   tpl <- R.loadTemplate [".",templateDir] template
   bs <- H.hastacheStr H.defaultConfig tpl context
   L.hPutStr h bs
+
+data T = T (U.Vector Double) {-# UNPACK #-} !Double
+
+-- | Compute a graph of throughput, requests completed per time
+-- interval.
+graphThroughput :: Int          -- ^ Number of time slices.
+                -> Double       -- ^ Amount of time elapsed.
+                -> V.Vector Summary -> (U.Vector Double, U.Vector Double)
+graphThroughput slices elapsed sumv =
+    (G.generate slices $ \i -> fromIntegral i * timeSlice,
+     G.unfoldrN slices go (T endv 0))
+  where go (T v i) = Just (fromIntegral (G.length a), T b j)
+           where (a,b) = G.span (<=t) v
+                 t = start + (j * timeSlice)
+                 j = i+1
+        timeSlice = elapsed / fromIntegral slices
+        start = summStart . G.head $ sumv
+        endv = G.convert . sort . G.map summEnd $ sumv
+
+data S = S {
+      fstS :: {-# UNPACK #-} !Double
+    , sndS :: {-# UNPACK #-} !Int
+    }
+
+-- | Compute a graph of concurrency.
+graphConcurrency :: V.Vector Summary -> [S]
+graphConcurrency = scanl1 f . filter ((/=0) . sndS) . map (foldl1 (flip f)) .
+                   List.groupBy ((==) `on` fstS) . go Q.empty . G.toList
+  where
+    f (S _ i) (S t j) = S t (i+j)
+    go q es@(Summary{..}:xs)
+        | summStart < t = S summStart 1 : go insQ xs
+        | otherwise     = S t (-1)      : go delQ es
+      where (t,delQ) = fromMaybe (1e300,q) $ Q.extractMin q
+            insQ     = Q.insert (summStart+summElapsed) q
+    go q _ = drain q
+      where drain (Q.extractMin -> Just (t,q')) = S t (-1) : drain q'
+            drain _ = []

lib/Network/HTTP/LoadTest/Types.hs

 import Control.Exception (Exception, IOException, SomeException, try)
 import Data.Aeson.Types (Value(..), FromJSON(..), ToJSON(..), (.:), (.=), object)
 import Data.Bits (xor)
+import Data.Conduit (ResourceT)
 import Data.Data (Data)
 import Data.Hashable (Hashable(hash))
 import Data.Typeable (Typeable)
 import qualified Data.Text as T
 import qualified Data.Vector as V
 import qualified Data.Vector.Generic as G
-import qualified Data.Vector.Unboxed as U
 
 newtype Req = Req {
-      fromReq :: Request IO
+      fromReq :: Request (ResourceT IO)
     } deriving (Typeable)
 
 instance Show Req where
     , latency99 :: !Double
     , latency999 :: !Double
     , latValues :: V.Vector Summary
-    , throughput :: !a
-    , throughput10 :: !Double
-    , thrValues :: U.Vector Double
+    , throughput :: !Double
     } deriving (Eq, Show, Typeable, Data)
 
 instance (NFData a) => NFData (Analysis a) where
                           , "latency999" .= latency999
                           , "latValues" .= latValues
                           , "throughput" .= throughput
-                          , "throughput10" .= throughput10
-                          , "thrValues" .= thrValues
                           ]
 
 instance (FromJSON a) => FromJSON (Analysis a) where
                            v .: "latency99" <*>
                            v .: "latency999" <*>
                            v .: "latValues" <*>
-                           v .: "throughput" <*>
-                           v .: "throughput10" <*>
-                           v .: "thrValues"
+                           v .: "throughput"
     parseJSON _ = empty
     filepath,
     hashable >= 1.1.2.0,
     hastache,
-    http-conduit >= 1.2,
+    http-conduit >= 1.6,
     http-types,
     lifted-base,
+    meldable-heap,
+    resourcet,
     statistics >= 0.10.0.0,
     text,
     text-format >= 0.3.0.4,
     base < 5,
     bytestring,
     cmdargs >= 0.7,
+    conduit,
     criterion,
     deepseq,
     http-conduit,

templates/basic.tpl

-lat: {{latency.mean}}
-values:
+evts = [
 {{#latValues}}
-  {{summStart}}
+    Event {{summStart}} {{summElapsed}},
 {{/latValues}}
-thr:
-{{#thrValues}}
-  {{x}}
-{{/thrValues}}
+  ]

templates/report.tpl

              style="width:450px;height:278px;"></div></td>
     <td><div id="time0" class="timechart"
              style="width:450px;height:278px;"></div></td>
+   </tr>
+   <tr>
+    <td></td>
+    <td><div id="thru0" class="timechart"
+             style="width:450px;height:278px;"></div></td>
+   </tr>
+   <tr>
+    <td></td>
+    <td><div id="wtf0" class="timechart"
+             style="width:450px;height:278px;"></div></td>
+   </tr>
   </tbody>
  </table>
 
 
 <script type="text/javascript">
 $(function () {
-  function mangulate(number, name, mean, times, lats, kdetimes, kdepdf) {
+  function mangulate(number, name, mean, times, lats, kdetimes,
+                     kdepdf, thrTimes, thrValues) {
     var meanSecs = mean;
     var units = $.timeUnits(mean);
     var scale = units[0];
     kdetimes = $.scaleBy(scale, kdetimes);
     lats = $.scaleBy(scale, lats);
     var kq = $("#kde" + number);
-    var xmin = Math.min(kdetimes[0], Math.min.apply(undefined, lats));
-    var xmax = Math.max(kdetimes[kdetimes.length-1], Math.max.apply(undefined, lats));
+    var ymin = Math.min(kdetimes[0], Math.min.apply(undefined, lats));
+    var ymax = Math.max(kdetimes[kdetimes.length-1], Math.max.apply(undefined, lats));
     var k = $.plot(kq,
            [{ label: name + " latency densities",
-              data: $.zip(kdetimes, kdepdf),
+              data: $.zip(kdepdf, kdetimes),
               }],
-           { xaxis: { min: xmin, max: xmax,
+           { yaxis: { min: ymin, max: ymax, labelWidth: 50,
                       tickFormatter: $.unitFormatter(units) },
-             yaxis: { ticks: false },
+             xaxis: { ticks: false },
              grid: { hoverable: true, markings: [ { color: '#6fd3fb',
-                     lineWidth: 1.5, xaxis: { from: mean, to: mean } } ] },
+                     lineWidth: 1.5, yaxis: { from: mean, to: mean } } ] },
            });
-    var o = k.pointOffset({ x: mean, y: 0});
+    var o = k.pointOffset({ y: mean, x: 0 });
     kq.append('<div class="meanlegend" title="' + $.renderTime(meanSecs) +
-              '" style="position:absolute;left:' + (o.left + 4) +
-              'px;bottom:139px;">mean</div>');
+              '" style="position:absolute;top:' + (o.top + 4) +
+              'px;left:139px;">mean</div>');
     $.plot($("#time" + number),
            [{ label: name + " latencies",
-              data: $.zip(lats, times) }],
+              data: $.zip(times, lats) }],
            { points: { show: true, radius: 2 },
              grid: { hoverable: true },
-             xaxis: { min: xmin, max: xmax,
+             yaxis: { min: ymin, max: ymax, labelWidth: 50,
                       tickFormatter: $.unitFormatter(units) },
-             yaxis: { min: 0, max: times[times.length-1],
-                      ticks: false,
-                      transform: function(v) { return -v; },
-                      inverseTransform: function(v) { return -v; } },
+             xaxis: { min: 0, max: thrTimes[thrTimes.length-1],
+                      ticks: false },
            });
-    $.addTooltip("#kde" + number, function(x,y) { return x + ' ' + units; });
+    $.plot($("#thru" + number),
+           [{ label: name + " rps",
+              data: $.zip(thrTimes,$.scaleBy(1/(thrTimes[1]-thrTimes[0]), thrValues)) }],
+           { bars: { show: true, barWidth: thrTimes[1]-thrTimes[0] },
+             grid: { hoverable: true },
+             yaxis: { labelWidth: 50, tickFormatter: $.unitFormatter("reqs") },
+             xaxis: { ticks: false },
+           });
+    var x = $.scaleBy(1/(thrTimes[1]-thrTimes[0]), thrValues);
+    var y = $.scaleBy(1, lats);
+    x.sort(function(a,b){return a-b;});
+    y.sort(function(a,b){return a-b;});
+    $.plot($("#wtf" + number),
+           [{ label: name + " rps",
+              data: $.zip(x,y) }],
+           { lines: { show: true }});
+    $.addTooltip("#kde" + number, function(x,y) { return y + ' ' + units; });
     $.addTooltip("#time" + number, function(x,y) {
-      return 'Latency at ' + $.renderTime(y) + ': ' + x + ' ' + units;
+      return 'Latency at ' + $.renderTime(x) + ': ' + y + ' ' + units;
+    });
+    $.addTooltip("#thru" + number, function(x,y) {
+      return 'Req/sec at ' + $.renderTime(x) + ': ' + parseInt(y);
     });
   };
   mangulate(0, "name",
             /* start */ [{{#latValues}}{{summStart}},{{/latValues}}],
             /* elapsed */ [{{#latValues}}{{summElapsed}},{{/latValues}}],
             /* kde times */ [{{#latKdeTimes}}{{x}},{{/latKdeTimes}}],
-            /* kde pdf */ [{{#latKdePDF}}{{x}},{{/latKdePDF}}]);
+            /* kde pdf */ [{{#latKdePDF}}{{x}},{{/latKdePDF}}],
+            /* thr times */ [{{#thrTimes}}{{x}},{{/thrTimes}}],
+            /* thr values */ [{{#thrValues}}{{x}},{{/thrValues}}]);
 });
 $(document).ready(function () {
     $(".time").text(function(_, text) {
Tip: Filter by directory path e.g. /media app.js to search for public/media/app.js.
Tip: Use camelCasing e.g. ProjME to search for ProjectModifiedEvent.java.
Tip: Filter by extension type e.g. /repo .js to search for all .js files in the /repo directory.
Tip: Separate your search with spaces e.g. /ssh pom.xml to search for src/ssh/pom.xml.
Tip: Use ↑ and ↓ arrow keys to navigate and return to view the file.
Tip: You can also navigate files with Ctrl+j (next) and Ctrl+k (previous) and view the file with Ctrl+o.
Tip: You can also navigate files with Alt+j (next) and Alt+k (previous) and view the file with Alt+o.