This repository has been archived by the owner on Mar 6, 2021. It is now read-only.

Replaced deprecated atleast2d_or_csr with check_array #7

Open: wants to merge 1 commit into master
16 changes: 10 additions & 6 deletions in random_layer.py
@@ -24,7 +24,7 @@
 from scipy.spatial.distance import cdist, pdist, squareform
 
 from sklearn.metrics import pairwise_distances
-from sklearn.utils import check_random_state, atleast2d_or_csr
+from sklearn.utils import check_random_state, check_array
 from sklearn.utils.extmath import safe_sparse_dot
 from sklearn.base import BaseEstimator, TransformerMixin
 
@@ -108,7 +108,7 @@ def fit(self, X, y=None):
         -------
         self
         """
-        X = atleast2d_or_csr(X)
+        X = check_array(X)
 
         self._generate_components(X)
 
@@ -130,7 +130,7 @@ def transform(self, X, y=None):
         -------
         X_new : numpy array of shape [n_samples, n_components]
         """
-        X = atleast2d_or_csr(X)
+        X = check_array(X)
 
         if (self.components_ is None):
             raise ValueError('No components initialized')
@@ -165,7 +165,7 @@ class RandomLayer(BaseRandomLayer):
     array of the same shape as its argument (the input activation array, of
     shape [n_samples, n_hidden]). Functions provided are 'sine', 'tanh',
     'tribas', 'inv_tribas', 'sigmoid', 'hardlim', 'softlim', 'gaussian',
-    'multiquadric', or 'inv_multiquadric'.
+    'multiquadric', 'inv_multiquadric' and 'reclinear'.
 
     Parameters
     ----------
@@ -192,8 +192,8 @@ class RandomLayer(BaseRandomLayer):
 
         It must be one of 'tanh', 'sine', 'tribas', 'inv_tribas',
         'sigmoid', 'hardlim', 'softlim', 'gaussian', 'multiquadric',
-        'inv_multiquadric' or a callable. If None is given, 'tanh'
-        will be used.
+        'inv_multiquadric', 'reclinear' or a callable. If None is given,
+        'tanh' will be used.
 
         If a callable is given, it will be used to compute the activations.
 
@@ -241,6 +241,9 @@ class RandomLayer(BaseRandomLayer):
     _inv_multiquadric = (lambda x:
                          1.0/(np.sqrt(1.0 + pow(x, 2.0))))
 
+    # rectified linear: max(0, x)
+    _reclinear = (lambda x: np.maximum(0, x))
+
     # internal activation function table
     _internal_activation_funcs = {'sine': np.sin,
                                   'tanh': np.tanh,
@@ -252,6 +255,7 @@
                                   'gaussian': _gaussian,
                                   'multiquadric': _multiquadric,
                                   'inv_multiquadric': _inv_multiquadric,
+                                  'reclinear': _reclinear
                                   }
 
     def __init__(self, n_hidden=20, alpha=0.5, random_state=None,
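
One note on the swap: check_array is the current scikit-learn validation helper, but it is not a strict drop-in for atleast2d_or_csr. The old helper accepted scipy CSR matrices, while check_array rejects sparse input unless accept_sparse is passed; since the module imports safe_sparse_dot, sparse inputs were presumably meant to keep working. A minimal sketch of the difference, assuming one wanted to preserve the sparse path (the accept_sparse argument is not part of this patch):

import numpy as np
from scipy.sparse import csr_matrix
from sklearn.utils import check_array

# Dense input: validated and returned as a 2-D float ndarray, as in this patch.
X_dense = check_array(np.array([[0.0, 1.0], [2.0, 3.0]]))

# Sparse input: check_array(X) with default arguments raises TypeError for
# sparse matrices; accept_sparse='csr' keeps the CSR-friendly behaviour that
# atleast2d_or_csr provided.
X_sparse = check_array(csr_matrix(np.eye(3)), accept_sparse='csr')

print(X_dense.shape, X_sparse.shape)   # (2, 2) (3, 3)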
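The other change adds a 'reclinear' activation, a plain rectified linear unit (max(0, x)) applied elementwise to the hidden activations, and registers it in the internal activation table. A small sketch of the new activation and how it would be selected by name, assuming the patched random_layer.py is importable (the constructor arguments mirror parameters already documented in the diff):

import numpy as np

# The new internal activation: elementwise max(0, x).
reclinear = lambda x: np.maximum(0, x)
print(reclinear(np.array([-2.0, -0.5, 0.0, 1.5])))   # [0.  0.  0.  1.5]

# Hypothetical usage of the patched layer:
# from random_layer import RandomLayer
# layer = RandomLayer(n_hidden=50, activation_func='reclinear', random_state=0)
# X_new = layer.fit_transform(X)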