# Commits

committed 190aa32

Looks like it kinda-maybe works.

# flock.py

` """`
` from __future__ import division`
` `
`+import numpy as np`
` import scipy as sp`
`-import numpy as np`
`-from numpy import linalg as linalg`
`+from scipy.spatial import distance`
` from copy import copy`
` `
` # we expect params to contain the following keys:`
` }`
` `
` class SimState(object):`
`-    "A thin wrapper around some simulation state."`
`+    "A holder for some simulation state."`
`     params = None`
`     def __init__(self, params):`
`         self.params = params.copy()`
` def do_flocking_sim(simstate, steps=100, seed=None):`
`     simstate = copy(simstate)`
`     params = simstate.params`
`+    locs = simstate.locs`
`+    vels = simstate.vels`
`     for step in xrange(steps):`
`-        simstate.vels = normalise_lengths(simstate.vels + random_unit_vectors(params) * params['noise'])`
`-        simstate.locs = simstate.locs + params['dt'] * simstate.vels`
`+        #The algorithm: `
`+        #take neighbourhood sum, normalise, perturb, normalise.`
`+        #Step 1: take neighbourhood sum, normalise:`
`+        sp_neighbours = basic_calc_neighbours(locs, params)`
`+        # this should give us the sum of all neighbouring velocities,`
`+        # including the agent's own.`
`+        vels = normalise_lengths(sp_neighbours * vels)`
`+        #Step 2: perturb, normalise:`
`+        vels = normalise_lengths(vels * (1-params['noise']) + random_unit_vectors(params) * params['noise'])`
`+        `
`+        #boundary conditions?`
`+        locs = fract(locs + params['dt'] * vels)`
`+    simstate.locs = locs`
`+    simstate.vels = vels`
`     return simstate        `
` `
`-def random_unit_vectors(params):`
`-    """general a matrix of isotropically-distributed unit row vectors"""`
`-    #the normal distribution is isotropic in all dimensions`
`-    vectors = np.random.normal(0., 1., (params['num_agents'], 2))`
`-    return normalise_lengths(vectors)`
`-`
` def normalise_lengths(row_vectors):`
`     """normalise the lengths of some stacked row vectors.`
`     """`
`     return row_vectors / np.sqrt(np.sum(np.square(row_vectors), 1)).reshape(-1,1)`
`+`
`+def random_isotropic_vectors(params):`
`+    """generate a matrix of isotropically-distributed row vectors of mean length 1"""`
`+    #the normal distribution is isotropic in all dimensions`
`+    return np.random.normal(0., 2 ** -0.5, (params['num_agents'], 2))`
`+`
`+def random_unit_vectors(params):`
`+    """generate a matrix of isotropically-distributed unit row vectors"""`
`+    return normalise_lengths(random_isotropic_vectors(params))`
`+`
`+def distances(row_vectors):`
`+    """construct a square symmetric pairwise distance matrix from stacked`
`+    rows of observation vectors. Takes the same args as pdist proper. nb -`
`+    always casts to 64 bit floats."""`
`+    return distance.squareform(`
`+      distance.pdist(row_vectors)`
`+    )`
`+`
`+def basic_calc_neighbours(row_vectors, params):`
`+    #some forgotten bit of profiling long ago suggested that this weird casting saved some CPU cycles`
`+    return sp.sparse.csr_matrix(`
`+      (distances(row_vectors)<params['radius']).astype('int8')`
`+    )`
`+`
`+def fract(a):`
`+    """shorthand for 'fractional part of'"""`
`+    return np.remainder(a,1.0)`