Working auto differentiation, enables the HMC on test cases. #53

Open
wants to merge 13 commits into master
1 change: 0 additions & 1 deletion cpnest/cpnest.py
@@ -93,7 +93,6 @@ def __init__(self,
        self.nthreads = nthreads

        output = os.path.join(output, '')
-       os.makedirs(output, exist_ok=True)

        self.logger = logging.getLogger('CPNest')
        self.logger.update(output=output, verbose=verbose)
103 changes: 103 additions & 0 deletions cpnest/numerical_grad.py
@@ -0,0 +1,103 @@
import numpy as np
from scipy.interpolate import Rbf

class numerical_gradient:
    """
    Numerical gradient class. It computes the gradient of a scalar
    function with respect to its arguments using central finite
    differencing. The class caches the points it has visited and the
    gradients computed there; once signalled via update_state(), it
    switches to an RBF interpolant of the cached gradients for speed,
    and can be signalled again to return to brute-force differencing.
    """
    def __init__(self, dimension, function, cache_size = 1000):
        self.dimension = dimension
        self.interpolant = [None]*self.dimension
        self.step = 1e-7
        self.function = function
        self.index = self.counter()
        self.k = 0
        self.cache_size = cache_size
        # cyclic cache of sample locations and the gradients evaluated there
        self.cache = np.zeros((self.cache_size,self.dimension), dtype = np.float64)
        self.cache_values = np.zeros((self.cache_size,self.dimension), dtype = np.float64)
        self.brute_force = True

    def __call__(self, x):
        """
        x can be a LivePoint or a plain np.ndarray
        """
        if self.brute_force:
            return self.finite_differencing(x)
        else:
            return self.interpolated_differencing(x)

    def counter(self):
        # endless cyclic index into the fixed-size cache
        n = 0
        while True:
            yield n%self.cache_size
            n += 1

    def finite_differencing(self, x):
        """
        Compute the central difference numerical derivative,
        v[i] = (f(x + h e_i) - f(x - h e_i)) / (2 h), with h = self.step.

        x can be a LivePoint (perturbed by name) or a np.ndarray
        (perturbed by index).
        """
        v = np.zeros(self.dimension)
        xl = x.copy()
        xu = x.copy()
        # LivePoints are indexed by name, plain arrays by position
        names = getattr(x, 'names', range(self.dimension))
        for i,n in enumerate(names):
            xl[n] -= self.step
            xu[n] += self.step
            v[i] = (self.function(xu)-self.function(xl))/(2*self.step)
            # undo the perturbation before moving to the next coordinate
            xl[n] += self.step
            xu[n] -= self.step

        # store the sample and its gradient in the cyclic cache
        oldk = self.k
        self.k = next(self.index)
        self.cache[self.k, :] = getattr(x, 'values', x)
        self.cache_values[self.k, :] = v
        # automatic switch to interpolation once the cache first wraps
        # around, currently disabled:
        # if oldk == self.cache_size-1 and self.k == 0 and self.brute_force:
        #     self.update_state()
        return v

    def interpolated_differencing(self, x):
        # evaluate each gradient component's RBF interpolant at x
        y = getattr(x, 'values', x)
        return np.array([self.interpolant[i](*np.transpose(y)) for i in range(self.dimension)])

    def interpolate(self):
        # fit one RBF interpolant per gradient component over the cache
        for i in range(self.dimension):
            self.interpolant[i] = Rbf(*self.cache.T, self.cache_values[:,i])
        return self.interpolant

    def update_state(self):
        # toggle between brute-force differencing and interpolation
        if self.brute_force:
            self.interpolate()
            self.brute_force = False
        else:
            self.brute_force = True
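
The Rbf call pattern used in interpolate() is scipy's: the coordinate arrays are passed positionally and the values array comes last. A self-contained sketch of that pattern (the test function here is an arbitrary choice, not anything from the PR):

import numpy as np
from scipy.interpolate import Rbf

# fit f(x, y) = x*y on scattered samples, then evaluate at a new point
pts = np.random.uniform(-1, 1, size=(200, 2))
vals = pts[:, 0] * pts[:, 1]
rbf = Rbf(*pts.T, vals)     # Rbf(x_coords, y_coords, values)
print(rbf(0.3, -0.5))       # close to 0.3 * -0.5 = -0.15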

if __name__=="__main__":

    dim = 20
    def function(x):
        return np.sum([x[i]**2 for i in range(x.shape[0])])

    X = np.random.uniform(-10,10.,size=(3000,dim))

    N = numerical_gradient(dim, function)

    for i in range(X.shape[0]):
        if i == 1000 or i == 2000:
            print('switching computation mode')
            N.update_state()
        print("x = ",X[i], "num grad = ",N(X[i]))


18 changes: 8 additions & 10 deletions cpnest/parameter.pyx
@@ -10,8 +10,8 @@ cimport numpy as np
import numpy as np

cpdef LivePoint rebuild_livepoint(names):
    cdef LivePoint lp=LivePoint(names)
    return lp

cdef class LivePoint:
    """
@@ -28,9 +28,9 @@ cdef class LivePoint:
        self.names = names
        self.dimension = len(names)
        if d is not None:
            self.values = array.array('d',d)
        else:
            self.values = array.array('d',[0]*self.dimension)

    def keys(self):
        return self.names
@@ -42,12 +42,12 @@ cdef class LivePoint:
        return self.__class__.__name__+'({0:s}, d={1:s}, logL={2:f}, logP={3:f})'.format(str(self.names),str(self.values),self.logL,self.logP)

    def __getstate__(self):
        return (self.logL,self.logP,self.values)

    def __setstate__(self,state):
        self.logL=state[0]
        self.logP=state[1]
        self.values=array.array('d',state[2])

    def __str__(LivePoint self):
        return str({n:self[n] for n in self.names})
@@ -150,5 +150,3 @@ cdef class LivePoint:
        x['logL'] = self.logL
        x['logPrior'] = self.logP
        return x
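
rebuild_livepoint together with __getstate__/__setstate__ gives the cdef class pickle support; the __reduce__ hook that ties them together lies outside the lines shown in this hunk. A minimal sketch of the usual pattern for extension types, assuming this class follows it:

# hypothetical __reduce__ following the standard pattern; the actual
# method is outside the lines shown in this diff
def __reduce__(self):
    # pickle recreates the object via rebuild_livepoint(self.names),
    # then restores (logL, logP, values) through __setstate__
    return (rebuild_livepoint, (self.names,), self.__getstate__())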

