Commit d8c9bad0 authored by Thalita FIRMO DRUMOND

LDMNet implementation and tests

parent 51127b79
@@ -9,6 +9,7 @@ Created on Wed Apr 18 16:03:33 2018
 import numpy as np
 from sklearn.neighbors import NearestNeighbors
 from sklearn.base import TransformerMixin, BaseEstimator, ClassifierMixin
+from scipy import sparse
 from tqdm import tqdm
 from skorch_utils import TransformerNet, StopperNet, NNClassifier
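The new `scipy.sparse` import alongside `NearestNeighbors` suggests a sparse kNN weight matrix, the standard ingredient of LDMNet's Laplace-Beltrami (point integral) update. A minimal sketch of such a construction, assuming a Gaussian kernel; this is illustrative, not the file's actual code:

import numpy as np
from sklearn.neighbors import NearestNeighbors
from scipy import sparse

def knn_weight_matrix(X, k=10, sigma=1.0):
    """Sparse kNN affinity matrix W[i, j] = exp(-d(i, j)^2 / sigma^2)."""
    n = X.shape[0]
    dist, idx = NearestNeighbors(n_neighbors=k).fit(X).kneighbors(X)
    rows = np.repeat(np.arange(n), k)            # row index for each neighbor entry
    data = np.exp(-dist.ravel() ** 2 / sigma ** 2)
    return sparse.csr_matrix((data, (rows, idx.ravel())), shape=(n, n))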
@@ -154,7 +155,7 @@ class ThetaEstimator(object):
     Specification of a class that should inherit LDMNet and implement
     """
     def __init__(self, *args, **kwargs):
-        super().__init__(*args, **kwargs)
+        # super().__init__(*args, **kwargs)
         self.stop = False
         self.warm_start = False
         self.initialized_ = False
......@@ -164,17 +165,18 @@ class ThetaEstimator(object):
pass
def partial_fit(self, X, y, epochs=2):
""" X is dict with X, alpha and Z"""
return self
def transform(self, X):
return X
class LDMNetDummy(ThetaEstimator, LDMNet):
class LDMNetDummy(LDMNet, ThetaEstimator):
def __init__(self, *args,**kwargs):
super().__init__(*args, **kwargs)
class LDMNetReg(ThetaEstimator, LDMNet):
class LDMNetReg(LDMNet, ThetaEstimator):
def __init__(self, *args, lr=0.01, **kwargs):
super().__init__(*args, **kwargs)
self.lr = lr
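Two things worth noting in this hunk. First, the base-class reordering matters: with `class LDMNetReg(LDMNet, ThetaEstimator)`, Python's MRO now resolves shared method names to `LDMNet` first, leaving `ThetaEstimator` as the fallback for the hooks it specifies (`partial_fit`, `transform`). Second, the fixed `partial_fit` docstring describes its input contract; spelled out with hypothetical values (the interpretation of `alpha` and `Z` as the dual variable and manifold coordinates is an assumption based on the LDMNet algorithm, not stated in the diff):

X = {
    'X':     ksi_batch,   # current features at the chosen layer
    'alpha': alpha_k,     # presumably the Bregman/dual variable of the outer loop
    'Z':     Z_k,         # presumably the coordinates from the Laplace-Beltrami step
}
estimator.partial_fit(X, y, epochs=2)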
@@ -185,9 +187,9 @@ class LDMNetReg(ThetaEstimator, LDMNet):
     def partial_fit(self, X, y, epochs=2):
         if self.last_ksi is None:
-            self.last_ksi = X
-        alpha = self.alpha[-1]
-        Z = self.Z[-1]
+            self.last_ksi = X['X']
+        alpha = X['alpha']
+        Z = X['Z']
         for _ in range(epochs):
             dX = self.mu*(self.last_ksi - Z - alpha)
             self.last_ksi -= self.lr * dX
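`LDMNetReg.partial_fit` is plain gradient descent on the quadratic coupling term of the split objective, E(ksi) = (mu/2) * ||ksi - Z - alpha||^2, whose gradient is mu * (ksi - Z - alpha). A self-contained sketch of the same update (array shapes assumed equal):

import numpy as np

def ksi_update(ksi, Z, alpha, mu=0.01, lr=0.01, epochs=2):
    """Gradient descent on E(ksi) = mu/2 * ||ksi - Z - alpha||_F^2."""
    ksi = ksi.copy()
    for _ in range(epochs):
        ksi -= lr * mu * (ksi - Z - alpha)   # dE/dksi = mu * (ksi - Z - alpha)
    return ksi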
@@ -203,7 +205,7 @@ class LDMNetReg(ThetaEstimator, LDMNet):
         self.fit(X, y)
         return self.ksi

-class LDMNetSkorch(LDMNet, StopperNet, TransformerNet, NNClassifier):
+class LDMNetSkorch(LDMNet, StopperNet, NNClassifier, ClassifierMixin):
     def __init__(self, module, layer_name,
                  # lr=0.01,
                  # tol=1e-4, max_iter=200,
@@ -232,6 +234,8 @@ class LDMNetSkorch(LDMNet, StopperNet, TransformerNet, NNClassifier):
             loss += reg_loss
         return loss
+
+
 if __name__ == '__main__':
     from sklearn.datasets import make_moons
     from prototype_utils import MLPClassifier
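The `loss += reg_loss` line is the tail of a skorch `get_loss` override: the LDMNet penalty is added to the ordinary classification loss. The full override is not visible in this hunk, so the sketch below is an assumption about its shape; skorch's `get_loss` signature is real, but the attribute holding the current features is hypothetical:

# Sketch only: how ksi is fetched is assumed (a real override must keep
# ksi differentiable w.r.t. the network parameters).
def get_loss(self, y_pred, y_true, X=None, training=False):
    loss = super().get_loss(y_pred, y_true, X=X, training=training)
    if training:
        ksi = self.current_features_   # hypothetical: features at layer_name
        reg_loss = self.mu / 2 * ((ksi - self.Z[-1] - self.alpha[-1]) ** 2).sum()
        loss = loss + reg_loss
    return loss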
@@ -251,10 +255,10 @@ if __name__ == '__main__':
     # dummy_model = LDMNetDummy(mu=0.01, lambda_bar=0.01)
     # dummy_model.fit(X, y)
     torch.manual_seed(random_state)
-    module = MLP(X.shape[1], 4, 2)
+    module = MLP(n_in=X.shape[1], num_units=4, n_out=2)
     model = LDMNetSkorch(
         module=module,
-        layer_name='hidden',
+        layer_name='hidden1',
         max_epochs=300,
         batch_size=200,
         lr=0.05,
@@ -264,6 +268,9 @@ if __name__ == '__main__':
         callbacks=[EarlyStopping(10, 'valid_loss'),
                    NaNStopping()]
     )
+
+    model.fit(X, y)
+
     params = {
         'lr': uniform(1e-4, 1e-2),
         'mu': reciprocal(1e-4, 1e2),
...
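The truncated `params` dict uses `scipy.stats` distributions (`uniform`, `reciprocal`), the form that `sklearn.model_selection.RandomizedSearchCV` samples from; whether the script actually runs such a search is cut off here, so the wiring below is an assumption:

from sklearn.model_selection import RandomizedSearchCV
from scipy.stats import reciprocal, uniform

params = {
    'lr': uniform(1e-4, 1e-2),      # uniform on [1e-4, 1e-4 + 1e-2]
    'mu': reciprocal(1e-4, 1e2),    # log-uniform between 1e-4 and 1e2
}
search = RandomizedSearchCV(model, params, n_iter=20, cv=3,
                            random_state=random_state)
# search.fit(X, y)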
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Mon Apr 23 17:13:00 2018

@author: thalita
"""
from sklearn.decomposition import PCA
import matplotlib.pyplot as plt
import numpy as np
import scipy as sp


def plot_ksi_alpha_time(model, every_n=3, ncols=5, y=None, **kwargs):
    fitted_projs = plot_over_time(
        model, which_var='alpha', every_n=every_n, ncols=ncols, y=y,
        fitted_projs=None, **kwargs)
    plt.suptitle('alpha')
    plot_over_time(
        model, which_var='ksi', every_n=every_n, ncols=ncols, y=y,
        fitted_projs=fitted_projs, **kwargs)
    plt.suptitle('ksi')


def plot_over_time(model, which_var, every_n=3, ncols=5, y=None,
                   fitted_projs=None, **kwargs):
    ksi = getattr(model, which_var)
    # rows needed for ceil(len(ksi) / every_n) subplots in ncols columns
    nrows = len(ksi) // (ncols * every_n) + 1 * (len(ksi) % (ncols * every_n) > 0)
    fitted_projs_list = []
    plt.figure(figsize=(3.5 * ncols, 2.5 * nrows))
    for plotn, i in enumerate(range(0, len(ksi), every_n)):
        plt.subplot(nrows, ncols, plotn + 1)
        title = "k=%d " % i
        if ksi[i].shape[1] > 2:
            # project to 2D for plotting; reuse earlier projections if given
            if fitted_projs is None:
                proj = PCA(n_components=2, random_state=0).fit(ksi[i])
                fitted_projs_list.append(proj)
            else:
                proj = fitted_projs[plotn]
            ksi2d = proj.transform(ksi[i])
            title += " var: %0.1f %0.1f" % tuple(proj.explained_variance_ratio_[0:2])
        else:
            ksi2d = ksi[i]
        plt.title(title, fontsize='small')
        if y is not None:
            plt.scatter(*ksi2d.T, c=y, **kwargs)
        else:
            plt.scatter(*ksi2d.T, **kwargs)
        plt.xticks(fontsize='small')
        plt.yticks(fontsize='small')
    return fitted_projs_list


def plot_f_df(Z, title, smooth_delta=None, **kwargs):
    plt.figure(figsize=(10, 4))
    plt.subplot(1, 2, 1)
    plt.title(title)
    for feature in Z:
        if feature.ndim > 1:
            feature = feature.mean(axis=0)
        plt.plot(feature, **kwargs)
    Z = np.diff(Z, axis=-1)
    plt.subplot(1, 2, 2)
    plt.title("delta " + title)
    for feature in Z:
        if feature.ndim > 1:
            feature = feature.mean(axis=0)
        if smooth_delta is not None and smooth_delta != 0:
            if isinstance(smooth_delta, float):
                # a float is interpreted as a fraction of the series length
                smooth_delta = int(smooth_delta * feature.size)
            window = np.ones(smooth_delta)
            feature = np.convolve(feature, window / window.sum(), mode='valid')
        plt.plot(feature, **kwargs)


def plot_ldmm_vars(model, var_name='Z', **kwargs):
    Z = getattr(model, var_name)
    # Z is samples x features x time
    Z = np.stack(Z, axis=-1)
    # now it is features x samples x time
    Z = Z.transpose([1, 0, 2])
    plot_f_df(Z, var_name, **kwargs)


def plot_net_weights(model, par_name, **kwargs):
    """
    model should have a SaveWeights callback;
    par_name is the layer name
    """
    params = dict(model.callbacks_)['SaveWeights'].params
    par_names = [par_name + suffix for suffix in ['.weight', '.bias']]
    for par_name in par_names:
        if par_name in params[0]:
            W = [p_t[par_name] for p_t in params]
            # W is out x in x time
            W = np.stack(W, axis=-1)
            plot_f_df(W, 'W ' + par_name, **kwargs)
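A minimal usage sketch for these helpers, assuming a fitted `LDMNetSkorch` model that records `ksi`/`alpha`/`Z` per outer iteration and was trained with a `SaveWeights` callback (all names taken from this commit):

# model = LDMNetSkorch(..., callbacks=[SaveWeights()]); model.fit(X, y)
plot_ksi_alpha_time(model, every_n=3, ncols=5, y=y, s=5)
plot_ldmm_vars(model, var_name='Z', smooth_delta=0.1)
plot_net_weights(model, 'hidden1')
plt.show()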
@@ -16,7 +16,7 @@ import torch.nn.functional as F
 from skorch.utils import duplicate_items
 from skorch.utils import get_dim
 from skorch.utils import to_numpy
-from skorch.utils import to_var
+from skorch.utils import to_var, to_numpy
 from skorch.utils import params_for
 from skorch.callbacks import Callback
 from skorch.net import NeuralNetClassifier, NeuralNet
@@ -78,7 +78,10 @@ class StopperNet(object):
 class TransformerNet(TransformerMixin):
     # def __init__(self, *args, **kwargs):
     #     super().__init__(*args, **kwargs)
     #     self.transform_args = None
+    def initialize(self):
+        super().initialize()
+        self.transform_args = None
+
     def transform(self, X, **forward_kwargs):
         self.transform_args = forward_kwargs
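Moving the `transform_args` reset out of `__init__` (commented above) and into `initialize()` follows the skorch/sklearn convention: `__init__` should only store constructor arguments so that `sklearn.base.clone` can rebuild the estimator from `get_params()`, while `initialize()` is skorch's hook for derived state that must be reset whenever the net is (re)initialized.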
@@ -129,15 +132,29 @@ class MLP(nn.Module):
             if name == layer_name:
                 return X
             X = self.dropout(X)
         if name != 'output':
             raise Warning("name %s does not correspond to any layer," % name +
                           " returning output")
         X = F.softmax(self.output(X), dim=-1)
         return X
-class NNClassifier(NeuralNetClassifier, ClassifierMixin):
+class NNClassifier(TransformerNet, NeuralNetClassifier, ClassifierMixin):
     def __init__(self, *args, **kwargs):
         super().__init__(*args, **kwargs)

+class SaveWeights(Callback):
+    def initialize(self):
+        self.params = []
+
+    def on_epoch_end(self, net, **kwargs):
+        # snapshot every module parameter as a numpy array, detached from the graph
+        p_t = dict(net.module_.named_parameters())
+        for k, v in p_t.items():
+            p_t[k] = to_numpy(v)
+        self.params.append(p_t)

 class EarlyStopping(Callback):
     def __init__(self, n_iter_no_change=10,
                  metric='valid_acc'):
...
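Two notes on this last hunk. The `MLP.forward` above returns the representation at a requested named layer, which is how `layer_name='hidden1'` in the `__main__` demo selects which hidden activations play the role of LDMNet's features; `SaveWeights` is the per-epoch snapshot that `plot_net_weights` consumes. A self-contained sketch of the named-layer pattern (layer names and sizes assumed, not the repository's actual `MLP`):

import torch.nn as nn
import torch.nn.functional as F

class TinyMLP(nn.Module):
    def __init__(self, n_in=2, num_units=4, n_out=2):
        super().__init__()
        self.hidden1 = nn.Linear(n_in, num_units)
        self.output = nn.Linear(num_units, n_out)

    def forward(self, X, layer_name='output'):
        X = F.relu(self.hidden1(X))
        if layer_name == 'hidden1':
            return X  # intermediate features, e.g. LDMNet's ksi
        return F.softmax(self.output(X), dim=-1)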