# JustPaste.it (paste-site header, commented out so it is not evaluated as code)

import theano
import numpy as np
import sklearn as sk
import matplotlib.pyplot as plt
from sklearn.linear_model import LinearRegression, LassoLarsCV
import sys
sys.path.insert(0, '/home/donbeo/Documents/pythoncode/my_algorithm/')
sys.path.insert(0, r'C:\Users\donbeo\Documents\pythoncode\my_algorithm\\')
sys.path.insert(0, '/ichec/home/users/donbeo/pythoncode/my_algorithm/')
sys.path.insert(0, '/home/donbeo/Applications/pylearn2/')

import pylearn2
from pylearn2.datasets.dense_design_matrix import DenseDesignMatrix
from pylearn2.models import mlp
from pylearn2.training_algorithms import sgd
from pylearn2.termination_criteria import EpochCounter



class SLFN():
    """Single-hidden-Layer Feedforward Network with a scikit-learn-like API.

    Wraps a pylearn2 MLP (one hidden layer + one linear output unit) trained
    with plain SGD. Exposes ``fit(X, y)`` / ``predict(X)`` so it can be used
    alongside the sklearn estimators imported above.
    """

    def __init__(self, n_nodes=10, activation_function='linear', num_epoch=10):
        """
        Parameters
        ----------
        n_nodes : int
            Number of units in the hidden layer.
        activation_function : {'linear', 'sigmoid'}
            Activation used by the hidden layer.
        num_epoch : int
            Number of SGD epochs (EpochCounter termination criterion).
        """
        self.n_nodes = n_nodes
        self.activation_function = activation_function
        self.num_epoch = num_epoch

    def fit(self, X, y):
        """Train the network on ``X`` (n_samples, n_features) and target ``y``.

        ``y`` may be 1-D; it is copied and reshaped to a column vector as
        required by DenseDesignMatrix. Returns ``self`` (fluent interface).

        Raises
        ------
        ValueError
            If ``activation_function`` is not 'linear' or 'sigmoid'.
        """
        # Validate up front: previously an unknown activation left
        # `hidden_layer` unbound and fit() crashed later with a NameError.
        if self.activation_function not in ('linear', 'sigmoid'):
            raise ValueError(
                "activation_function must be 'linear' or 'sigmoid', got %r"
                % (self.activation_function,))

        n_samples, n_features = X.shape
        # pylearn2 wants a 2-D target matrix; copy so the caller's y is untouched.
        y2d = y.copy()
        y2d.shape = (n_samples, 1)

        ds = DenseDesignMatrix(X=X, y=y2d)

        if self.activation_function == 'linear':
            hidden_layer = mlp.Linear(layer_name='hidden',
                                      dim=self.n_nodes, irange=.1,
                                      init_bias=1.)
        else:  # 'sigmoid' — guaranteed by the validation above
            hidden_layer = mlp.Sigmoid(layer_name='hidden',
                                       dim=self.n_nodes, irange=.1,
                                       init_bias=1.)

        output_layer = mlp.Linear(dim=1, layer_name='y', irange=.1)
        trainer = sgd.SGD(learning_rate=.05, batch_size=10,
                          termination_criterion=EpochCounter(self.num_epoch))

        layers = [hidden_layer, output_layer]
        self.ann = mlp.MLP(layers, nvis=n_features)
        trainer.setup(self.ann, ds)

        # Drive SGD manually so we can report monitoring info each epoch;
        # the EpochCounter criterion decides when to stop.
        while True:
            trainer.train(dataset=ds)
            self.ann.monitor.report_epoch()
            if not trainer.continue_learning(self.ann):
                break

        return self

    def predict(self, X):
        """Forward-propagate ``X`` through the trained net.

        Returns a flat 1-D numpy array of predictions. Must be called after
        ``fit`` (which creates ``self.ann``).
        """
        return self.ann.fprop(theano.shared(X, name='inputs')).eval().ravel()
        


# Example usage, kept as a no-op module-level string so it never runs on
# import. Trains the sigmoid SLFN on a 2-feature product target and plots
# true vs. estimated values. Remove the quotes to execute it.
'''
n = 200
p = 2
X = np.random.normal(0, 1, (n, p))
y = X[:,0]* X[:, 1] + np.random.normal(0, .1, n)
y.shape = (n, 1)


nn = SLFN(n_nodes=10, activation_function='sigmoid', num_epoch=200).fit(X, y)
y_est = nn.predict(X)

plt.plot(y)
plt.plot(y_est)
plt.show()
'''