NetworkBaseLayer

class NetworkBaseLayer.Container(layer_class=None, name='', network=None, train_flag=False, eval_flag=False, depth=1, consensus='flat', forward_weights_init=None, bias_init=None, weight_clip=0.0, cost=None, recurrent_weights_init=None, substitute_param_expr=None)[source]
Parameters:
  • layer_class (str) – name of the layer type, e.g. “hidden”, “recurrent” or “lstm”; see LayerClasses.
  • name (str) – custom layer name, e.g. “hidden_2”
  • network (Network.LayerNetwork) – the network which this layer will be part of
  • forward_weights_init (str) – see self.create_forward_weights()
  • bias_init (str) – see self.create_bias()
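A minimal sketch of how a subclass might wire up its parameters with the helpers documented below; the subclass name and dimensions are hypothetical, and only create_forward_weights(), create_bias() and add_param() come from this API:

    class HiddenSketchLayer(Container):
        # Hypothetical subclass; not part of NetworkBaseLayer itself.
        def __init__(self, n_in, n_out, **kwargs):
            super(HiddenSketchLayer, self).__init__(**kwargs)
            # weight matrix of shape (n_in, n_out), initialized per forward_weights_init
            self.W = self.add_param(self.create_forward_weights(n_in, n_out, name="W_in"))
            # bias vector of the output dimension, initialized per bias_init
            self.b = self.add_param(self.create_bias(n_out))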
add_param(param, name='')[source]
Return type: theano.SharedVariable
create_bias(n, prefix='b', name='', init_eval_str=None)[source]
Parameters: n (int) – output dimension
Return type: theano.shared
create_forward_weights(n, m, name=None)[source]
Parameters:
  • n (int) – input dimension
  • m (int) – output dimension
  • name (str|None) – layer name
Return type: theano.shared
create_random_normal_weights(n, m, scale=None, name=None)[source]
create_random_uniform_weights(n, m, p=None, p_add=None, l=None, name=None, depth=None)[source]
create_random_uniform_weights1(n, m, p=None, l=None, name=None)[source]
create_random_uniform_weights2(n, m=None, name=None)[source]
create_random_unitary_tiled_weights(n, m, name=None)[source]
create_random_unitary_weights(n, m, name=None)[source]
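For intuition, a scaled uniform initialization of the kind such helpers typically implement is sketched below; the exact scaling rule used by create_random_uniform_weights() is an assumption here (Glorot-style), not taken from the source:

    import numpy
    import theano

    def random_uniform_weights_sketch(n, m, l=None):
        rng = numpy.random.RandomState(1234)  # cf. rng_seed below
        if l is None:
            l = numpy.sqrt(6.0 / (n + m))  # assumed limit; the real helper may differ
        values = rng.uniform(low=-l, high=l, size=(n, m)).astype("float32")
        return theano.shared(value=values, borrow=True, name="W")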
create_recurrent_weights(n, m, name=None)[source]
Parameters:
  • n (int) – input dimension
  • m (int) – output dimension
  • name (str|None) – layer name
Return type: theano.shared
dot(vec, mat)[source]
get_params_dict()[source]
Return type: dict[str,numpy.ndarray|theano.sandbox.cuda.CudaNdArray]
get_params_vars()[source]
Returns: list of shared vars in a well-defined order
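The stable ordering matters when pairing parameters with their gradients; a hypothetical plain-SGD step, assuming a layer instance and a total_cost expression that depends only on shared variables:

    import theano
    import theano.tensor as T

    params = layer.get_params_vars()           # shared vars, well-defined order
    grads = T.grad(total_cost, params)         # one gradient per parameter, same order
    updates = [(p, p - 0.01 * g) for p, g in zip(params, grads)]
    train_step = theano.function([], total_cost, updates=updates)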
classmethod guess_source_layer_name(layer_name)[source]
classmethod initialize_rng()[source]
layer_class = None[source]
load(head)[source]
num_params()[source]
rng_seed = 1234[source]
save(head)[source]
set_attr(name, value)[source]
Parameters:
  • name (str) – key name
  • value (bool|int|float|str|list|dict) – value

This will be stored by to_json() and save() (in HDF). More complex types such as list or dict will be encoded as a JSON string when saved to HDF.
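A short hypothetical usage; the attribute names here are made up:

    layer.set_attr("n_out", 512)              # simple values are stored as-is
    layer.set_attr("filter_shape", [3, 3])    # list/dict values become a JSON string in HDF
    print(layer.to_json())                    # both attributes appear in the JSON output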

set_params_by_dict(params)[source]
shared(value, name, borrow=True)[source]
to_json()[source]
class NetworkBaseLayer.Layer(sources, n_out, index, y_in=None, target=None, target_index=None, sparse=False, cost_scale=1.0, input_scale=1.0, L1=0.0, L2=0.0, L2_eye=None, varreg=0.0, output_L2_reg=0.0, output_entropy_reg=0.0, output_entropy_exp_reg=0.0, with_bias=True, mask='unity', dropout=0.0, batch_drop=False, batch_norm=False, bn_use_sample=False, layer_drop=0.0, residual=False, carry=False, sparse_filtering=False, gradient_scale=1.0, trainable=True, device=None, dtype='float32', **kwargs)[source]
Parameters:
  • sources (list[NetworkBaseLayer.Layer]) – list of source layers
  • n_out (int) – output dim of W_in and dim of bias
  • L1 (float) – l1-param-norm regularization
  • L2 (float) – l2-param-norm regularization
  • mask (str) – “unity” or “dropout”
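The L1/L2 penalties are conventional parameter-norm terms; whether make_constraints() computes exactly the following is an assumption, but it illustrates the intent of the two parameters:

    import numpy
    import theano
    import theano.tensor as T

    L1, L2 = 0.01, 0.0001
    W = theano.shared(numpy.ones((128, 256), dtype="float32"), name="W")
    # l1-param-norm plus l2-param-norm, each scaled by its strength
    constraints = L1 * T.abs_(W).sum() + L2 * T.sqr(W).sum()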
add_param(param, name='', constraints=True, custom_update=None, custom_update_normalized=False, custom_update_exp_average=0, custom_update_condition=None, custom_update_accumulate_batches=None)[source]
Return type: theano.SharedVariable
batch_norm(h, dim, use_shift=True, use_std=True, use_sample=0.0, force_sample=False, index=None, sample_mean=None, gamma=None, beta=None, depth_norm=False)[source]
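For reference, standard batch normalization with scale and shift looks like the sketch below; the actual method additionally supports sample statistics (use_sample), masking via index, and depth_norm, none of which are modeled here:

    import theano.tensor as T

    def batch_norm_sketch(h, gamma, beta, eps=1e-5):
        mean = T.mean(h, axis=0, keepdims=True)             # statistics over the batch axis
        std = T.sqrt(T.var(h, axis=0, keepdims=True) + eps)
        h_norm = (h - mean) / std                           # use_std=True
        return gamma * h_norm + beta                        # use_shift=True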
concat_units(other, axis=1)[source]
cost()[source]
Return type: (theano.Variable | None, dict[theano.Variable,theano.Variable] | None)
Returns: cost, known_grads
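One plausible way to consume the returned pair in a training graph; scaling by cost_scale() is an assumption about usage, and Theano's known_grads argument lets a layer supply precomputed gradients for some variables:

    import theano.tensor as T

    cost, known_grads = layer.cost()
    if cost is not None:
        scaled_cost = layer.cost_scale() * cost
        grads = T.grad(scaled_cost, layer.get_params_vars(), known_grads=known_grads)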
cost_scale()[source]
Return type: theano.Variable
errors()[source]
Return type: theano.Variable
find_data_layer()[source]
get_branching()[source]
get_energy()[source]
make_consensus(networks, axis=2)[source]
make_constraints()[source]
make_output(output, collapse=True, sample_mean=None, gamma=None)[source]
output_index()[source]
recurrent = False[source]
to_json()[source]
transfer_output(device)[source]
class NetworkBaseLayer.SourceLayer(n_out, x_out=None, delay=0, sparse=False, name='', network=None, eval_flag=False, data_key=None, sources=None, dropout=0, train_flag=None, mask=None, index=None, y_in=None, dtype=None)[source]
cost()[source]
errors()[source]
Return type: theano.Variable
layer_class = 'source'[source]
make_constraints()[source]
recurrent = False[source]
transfer_output(device)[source]
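A minimal hypothetical construction, wrapping an input tensor as the network's data layer; the shapes are made up:

    import numpy
    import theano

    x = theano.shared(numpy.zeros((10, 4, 40), dtype="float32"), name="x")
    src = SourceLayer(n_out=40, x_out=x, name="data")
    # src can then appear in the sources list of a Layer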