Skip to content

Commit 4ec944f

Browse files
committed
Minor tidy ups on code.
1 parent 9cdf0f5 commit 4ec944f

File tree

3 files changed

+371
-150
lines changed

3 files changed

+371
-150
lines changed

deepgp/layers/layers.py

Lines changed: 106 additions & 26 deletions
Original file line numberDiff line numberDiff line change
@@ -15,8 +15,13 @@
1515

1616
class SparseGP_MPI(SparseGP):
1717

18-
def __init__(self, X, Y, Z, kernel, likelihood, mean_function=None, inference_method=None,
19-
name='sparse gp', Y_metadata=None, normalizer=False, mpi_comm=None, mpi_root=0, auto_update=True):
18+
def __init__(self, X, Y, Z, kernel, likelihood,
19+
mean_function=None, inference_method=None,
20+
name='sparse gp', Y_metadata=None,
21+
normalizer=False,
22+
mpi_comm=None,
23+
mpi_root=0,
24+
auto_update=True):
2025
self.mpi_comm = mpi_comm
2126
self.mpi_root = mpi_root
2227
self.psicov = False
@@ -39,8 +44,12 @@ def __init__(self, X, Y, Z, kernel, likelihood, mean_function=None, inference_me
3944
inference_method = SVI_VarDTC()
4045
self.svi = True
4146

42-
super(SparseGP_MPI, self).__init__(X, Y, Z, kernel, likelihood, mean_function=mean_function, inference_method=inference_method,
43-
name=name, Y_metadata=Y_metadata, normalizer=normalizer)
47+
super(SparseGP_MPI, self).__init__(X, Y, Z, kernel,
48+
likelihood,
49+
mean_function=mean_function,
50+
inference_method=inference_method,
51+
name=name, Y_metadata=Y_metadata,
52+
normalizer=normalizer)
4453

4554
if self.svi:
4655
from ..util.misc import comp_mapping
@@ -156,7 +165,14 @@ def _inference_vardtc(self):
156165

157166
class Layer(SparseGP_MPI):
158167

159-
def __init__(self, layer_lower, dim_down, dim_up, likelihood, X=None, X_variance=None, init='PCA', Z=None, num_inducing=10, kernel=None, inference_method=None, uncertain_inputs=True,mpi_comm=None, mpi_root=0, back_constraint=True, encoder=None, auto_update=True, name='layer'):
168+
def __init__(self, layer_lower,
169+
dim_down, dim_up,
170+
likelihood,
171+
X=None, X_variance=None, init='PCA',
172+
Z=None, num_inducing=10, kernel=None,
173+
inference_method=None, uncertain_inputs=True,
174+
mpi_comm=None, mpi_root=0, back_constraint=True,
175+
encoder=None, auto_update=True, name='layer'):
160176

161177
self.uncertain_inputs = uncertain_inputs
162178
self.layer_lower = layer_lower
@@ -183,10 +199,13 @@ def __init__(self, layer_lower, dim_down, dim_up, likelihood, X=None, X_variance
183199

184200
if uncertain_inputs: X = NormalPosterior(X, X_variance)
185201
if kernel is None: kernel = kern.RBF(dim_up, ARD = True)
186-
assert kernel.input_dim==X.shape[1], "The dimensionality of input has to be equal to the input dimensionality of kernel!"
202+
assert kernel.input_dim==X.shape[1], "The dimensionality of input has to be equal to the input dimensionality of the kernel!"
187203
self.Kuu_sigma = Param('Kuu_var', np.zeros(num_inducing)+1e-3, Logexp())
188204

189-
super(Layer, self).__init__(X, Y, Z, kernel, likelihood, inference_method=inference_method, mpi_comm=mpi_comm, mpi_root=mpi_root, auto_update=auto_update, name=name)
205+
super(Layer, self).__init__(X, Y, Z, kernel,
206+
likelihood, inference_method=inference_method,
207+
mpi_comm=mpi_comm, mpi_root=mpi_root,
208+
auto_update=auto_update, name=name)
190209
self.link_parameter(self.Kuu_sigma)
191210
if back_constraint: self.encoder = encoder
192211

@@ -263,12 +282,22 @@ def gen_pred_layer(self, layer_lower=None, Y=None, X=None, binObserved=False):
263282
variationalterm = NormalPrior() if isinstance(self.variationalterm, NormalPrior) else NormalEntropy()
264283

265284
if binObserved:
266-
layer = BinaryPredLayer(X, Y, kernel, Z, posterior, likelihood=likelihood, layer_lower=layer_lower, inference_method=SVI_Ratio_Binary(),
267-
variationalterm= variationalterm, X_var=X_var,
268-
encoder=encoder, name=self.name)
285+
layer = BinaryPredLayer(X, Y, kernel, Z,
286+
posterior,
287+
likelihood=likelihood,
288+
layer_lower=layer_lower,
289+
inference_method=SVI_Ratio_Binary(),
290+
variationalterm=variationalterm,
291+
X_var=X_var,
292+
encoder=encoder,
293+
name=self.name)
269294
else:
270-
layer = PredLayer(X, Y, kernel, Z, posterior, likelihood=likelihood, layer_lower=layer_lower, inference_method=SVI_Ratio(),
271-
variationalterm= variationalterm, X_var=X_var,
295+
layer = PredLayer(X, Y, kernel, Z, posterior,
296+
likelihood=likelihood,
297+
layer_lower=layer_lower,
298+
inference_method=SVI_Ratio(),
299+
variationalterm=variationalterm,
300+
X_var=X_var,
272301
encoder=encoder, name=self.name)
273302
return layer
274303

@@ -282,24 +311,40 @@ def set_newX(self, X, append=False):
282311
else:
283312
self.unlink_parameter(self.X)
284313
if append:
285-
self.X = NormalPosterior(np.vstack([self.X.mean.values, X.mean.values]),np.vstack([self.X.variance.values, X.variance.values]))
314+
self.X = NormalPosterior(np.vstack([self.X.mean.values, X.mean.values]),
315+
np.vstack([self.X.variance.values, X.variance.values]))
286316
else:
287317
self.X = X
288318
self.link_parameter(self.X)
289319

290320
class ObservedLayer(Layer):
291321

292-
def __init__(self, dim_down, dim_up, Y, X=None, X_variance=None, Z=None, num_inducing=10, kernel=None, inference_method=None, likelihood=None, init='rand',
293-
mpi_comm=None, mpi_root=0, back_constraint=True, encoder=None, auto_update=True, repeatX=False, repeatXsplit=0, name='obslayer'):
322+
def __init__(self, dim_down, dim_up,
323+
Y, X=None, X_variance=None,
324+
Z=None, num_inducing=10,
325+
kernel=None, inference_method=None,
326+
likelihood=None, init='rand',
327+
mpi_comm=None, mpi_root=0,
328+
back_constraint=True, encoder=None,
329+
auto_update=True, repeatX=False,
330+
repeatXsplit=0, name='obslayer'):
294331
self.dim_up, self.dim_down = dim_up, dim_down
295332
self._Y = Y
296333
self.repeatX = repeatX
297334
self.repeatXsplit = repeatXsplit
298335
if likelihood is None: likelihood = likelihoods.Gaussian()
299336
self._toplayer_ = False
300337
self.variationalterm = NormalEntropy()
301-
super(ObservedLayer, self).__init__(None, self.dim_down, dim_up, likelihood, init=init, X=X, X_variance=X_variance, Z=Z,
302-
num_inducing=num_inducing, kernel=kernel, inference_method=inference_method, mpi_comm=mpi_comm, mpi_root=mpi_root, back_constraint=back_constraint, encoder=encoder, auto_update=auto_update, name=name)
338+
super(ObservedLayer, self).__init__(None, self.dim_down, dim_up,
339+
likelihood, init=init, X=X,
340+
X_variance=X_variance, Z=Z,
341+
num_inducing=num_inducing,
342+
kernel=kernel,
343+
inference_method=inference_method,
344+
mpi_comm=mpi_comm, mpi_root=mpi_root,
345+
back_constraint=back_constraint,
346+
encoder=encoder, auto_update=auto_update,
347+
name=name)
303348

304349
def set_as_toplayer(self, flag=True):
305350
if flag:
@@ -311,13 +356,28 @@ def set_as_toplayer(self, flag=True):
311356

312357
class HiddenLayer(Layer):
313358

314-
def __init__(self, layer_lower, dim_up, X=None, X_variance=None, Z=None, num_inducing=10, kernel=None, inference_method=None, noise_var=1e-2, init='rand', mpi_comm=None, mpi_root=0, back_constraint=True, encoder=None, auto_update=True, name='hiddenlayer'):
359+
def __init__(self, layer_lower, dim_up,
360+
X=None, X_variance=None,
361+
Z=None, num_inducing=10,
362+
kernel=None, inference_method=None,
363+
noise_var=1e-2, init='rand',
364+
mpi_comm=None, mpi_root=0, back_constraint=True,
365+
encoder=None, auto_update=True, name='hiddenlayer'):
366+
315367
self.dim_up, self.dim_down = dim_up, layer_lower.X.shape[1] #self.Y.shape[1]
316368
likelihood = likelihoods.Gaussian(variance=noise_var)
317369
self.variationalterm = NormalEntropy()
318370

319-
super(HiddenLayer, self).__init__(layer_lower, self.dim_down, dim_up, likelihood, init=init, X=X, X_variance=X_variance, Z=Z,
320-
num_inducing=num_inducing, kernel=kernel, inference_method=inference_method, mpi_comm=mpi_comm, mpi_root=mpi_root, back_constraint=back_constraint, encoder=encoder, auto_update=auto_update, name=name)
371+
super(HiddenLayer, self).__init__(layer_lower, self.dim_down,
372+
dim_up, likelihood, init=init,
373+
X=X, X_variance=X_variance, Z=Z,
374+
num_inducing=num_inducing,
375+
kernel=kernel,
376+
inference_method=inference_method,
377+
mpi_comm=mpi_comm, mpi_root=mpi_root,
378+
back_constraint=back_constraint,
379+
encoder=encoder, auto_update=auto_update,
380+
name=name)
321381

322382
def update_layer(self):
323383
super(HiddenLayer,self).update_layer()
@@ -335,18 +395,38 @@ def from_TopHiddenLayer(layer, name='hiddenlayer'):
335395
else:
336396
encoder = None
337397

338-
return HiddenLayer(layer.layer_lower, layer.dim_up, X=layer.X.mean.values, X_variance=layer.X.variance.values, Z=layer.Z.values,
339-
num_inducing=layer.Z.shape[1], kernel=layer.kern.copy(), inference_method=None, encoder = encoder,
340-
noise_var=layer.likelihood.variance.values, mpi_comm=layer.mpi_comm, mpi_root=layer.mpi_root, auto_update=layer.auto_update, name=name)
398+
return HiddenLayer(layer.layer_lower, layer.dim_up,
399+
X=layer.X.mean.values, X_variance=layer.X.variance.values,
400+
Z=layer.Z.values,
401+
num_inducing=layer.Z.shape[1],
402+
kernel=layer.kern.copy(),
403+
inference_method=None, encoder = encoder,
404+
noise_var=layer.likelihood.variance.values,
405+
mpi_comm=layer.mpi_comm, mpi_root=layer.mpi_root,
406+
auto_update=layer.auto_update, name=name)
341407

342408
class TopHiddenLayer(Layer):
343409

344-
def __init__(self, layer_lower, dim_up, X=None, X_variance=None, Z=None, num_inducing=10, kernel=None, inference_method=None, noise_var=1e-2, init='rand', uncertain_inputs=True, mpi_comm=None, mpi_root=0, encoder=None, back_constraint=True, auto_update=True, name='tophiddenlayer'):
410+
def __init__(self, layer_lower, dim_up, X=None, X_variance=None, Z=None,
411+
num_inducing=10, kernel=None, inference_method=None,
412+
noise_var=1e-2, init='rand', uncertain_inputs=True,
413+
mpi_comm=None, mpi_root=0,
414+
encoder=None,
415+
back_constraint=True,
416+
auto_update=True, name='tophiddenlayer'):
417+
345418
self.dim_up, self.dim_down = dim_up, layer_lower.X.shape[1]
346419
likelihood = likelihoods.Gaussian(variance=noise_var)
347420
self.variationalterm = NormalPrior()
348-
super(TopHiddenLayer, self).__init__(layer_lower, self.dim_down, dim_up, likelihood, init=init, X=X, X_variance=X_variance, Z=Z,
349-
num_inducing=num_inducing, kernel=kernel, inference_method=inference_method, uncertain_inputs=uncertain_inputs, mpi_comm=mpi_comm, mpi_root=mpi_root, back_constraint=back_constraint, encoder=encoder, auto_update=auto_update, name=name)
421+
super(TopHiddenLayer, self).__init__(layer_lower, self.dim_down,
422+
dim_up, likelihood, init=init,
423+
X=X, X_variance=X_variance, Z=Z,
424+
num_inducing=num_inducing, kernel=kernel,
425+
inference_method=inference_method, uncertain_inputs=uncertain_inputs,
426+
mpi_comm=mpi_comm, mpi_root=mpi_root,
427+
back_constraint=back_constraint,
428+
encoder=encoder, auto_update=auto_update,
429+
name=name)
350430

351431
def update_layer(self):
352432
super(TopHiddenLayer,self).update_layer()

0 commit comments

Comments (0)