from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals

from caffe2.python import core
class Regularizer(object):
    """Base class for parameter regularizers.

    Adds regularization to train_net for a given parameter. The factor ahead
    of the regularization term is given at initialization time.
    The param should be a BlobReference.
    """

    def __call__(self, net, param_init_net, param, grad=None):
        # Delegate to the subclass-specific implementation.
        return self._run(net, param_init_net, param, grad)

    def _run(self, net, param_init_net, param, grad):
        # Subclasses must override this to emit their regularization ops.
        raise Exception("Not Implemented")
class L1Norm(Regularizer):
    """L1-norm regularizer: emits ops computing reg_lambda * ||param||_1.

    The scaled norm is written to a new scoped blob, which is returned so the
    caller can add it to the loss.
    """

    def __init__(self, reg_lambda):
        super(L1Norm, self).__init__()
        assert reg_lambda >= 0, \
            'factor ahead of regularization should be 0 or positive'
        # NOTE(review): assignment restored — _run reads self.reg_lambda,
        # which was otherwise never set in the visible text.
        self.reg_lambda = reg_lambda

    def _run(self, net, param_init_net, param, grad=None):
        # Scoped blob that will hold the scalar regularization value.
        output_blob = net.NextScopedBlob(param + '_l1_regularization')
        net.LpNorm([param], [output_blob], p=1)
        net.Scale([output_blob], [output_blob], scale=self.reg_lambda)
        # NOTE(review): return restored so __call__ yields the blob holding
        # the regularization term instead of None.
        return output_blob
class L2Norm(Regularizer):
    """L2-norm regularizer: emits ops computing reg_lambda * ||param||_2.

    The scaled norm is written to a new scoped blob, which is returned so the
    caller can add it to the loss.
    """

    def __init__(self, reg_lambda):
        super(L2Norm, self).__init__()
        assert reg_lambda >= 0, \
            'factor ahead of regularization should be 0 or positive'
        # NOTE(review): assignment restored — _run reads self.reg_lambda,
        # which was otherwise never set in the visible text.
        self.reg_lambda = reg_lambda

    def _run(self, net, param_init_net, param, grad=None):
        # Scoped blob that will hold the scalar regularization value.
        output_blob = net.NextScopedBlob(param + '_l2_regularization')
        net.LpNorm([param], [output_blob], p=2)
        net.Scale([output_blob], [output_blob], scale=self.reg_lambda)
        # NOTE(review): return restored so __call__ yields the blob holding
        # the regularization term instead of None.
        return output_blob
class MaxNorm(Regularizer):
    """Constrains a sparse parameter's rows to have at most the given norm.

    Only sparse gradients (core.GradientSlice) are supported; the constraint
    is applied in place to the touched rows of `param`.
    """

    def __init__(self, norm=1.0):
        super(MaxNorm, self).__init__()
        # NOTE(review): assignment restored — _run reads self.norm, which was
        # otherwise never set in the visible text.
        self.norm = norm

    def _run(self, net, param_init_net, param, grad):
        assert self.norm > 0, 'norm should be bigger than 0.'
        if isinstance(grad, core.GradientSlice):
            # NOTE(review): operator call reconstructed — only the input list
            # survives in the visible text; confirm op name/arguments against
            # the caffe2 SparseNormalize operator.
            net.SparseNormalize(
                [param, grad.indices, grad.values],
                [param],
                use_max_norm=True,
                norm=self.norm,
            )
        else:
            raise NotImplementedError(
                "MaxNorm is not supported for dense parameters"
            )
class ConstantNorm(Regularizer):
    """Rescales a sparse parameter's rows to exactly the given norm.

    Only sparse gradients (core.GradientSlice) are supported; the
    normalization is applied in place to the touched rows of `param`.
    """

    def __init__(self, norm=1.0):
        super(ConstantNorm, self).__init__()
        # NOTE(review): assignment restored — _run reads self.norm, which was
        # otherwise never set in the visible text.
        self.norm = norm

    def _run(self, net, param_init_net, param, grad):
        assert self.norm > 0, 'norm should be bigger than 0.'
        if isinstance(grad, core.GradientSlice):
            # NOTE(review): operator call reconstructed — only the input list
            # survives in the visible text; confirm op name/arguments against
            # the caffe2 SparseNormalize operator (use_max_norm=False forces
            # the norm to the constant value rather than capping it).
            net.SparseNormalize(
                [param, grad.indices, grad.values],
                [param],
                use_max_norm=False,
                norm=self.norm,
            )
        else:
            raise NotImplementedError(
                "ConstantNorm is not supported for dense parameters"
            )