Commit 46a948ea by Ting PAN

Fix the crash caused by memory sharing in StopGradientOp

1 parent 387a3675
......@@ -78,7 +78,8 @@ List Brief
`BNLayer`_               The implementation of ``BNLayer``.
`NormalizeLayer`_        The implementation of ``NormalizeLayer``.
`TileLayer`_             The extended implementation of ``TileLayer``.
`ExpandDimsLayer`_       The implementation of ``ExpandDimsLayer``
`ExpandDimsLayer`_       The implementation of ``ExpandDimsLayer``.
`StopGradientLayer`_     The implementation of ``StopGradientLayer``.
`ProposalLayer`_         The implementation of ``ProposalLayer``.
======================== =============================================================================
......@@ -186,6 +187,7 @@ API Reference
.. _NormalizeLayer: #dragon.vm.caffe.layers.common.NormalizeLayer
.. _TileLayer: #dragon.vm.caffe.layers.common.TileLayer
.. _ExpandDimsLayer: #dragon.vm.caffe.layers.common.ExpandDimsLayer
.. _StopGradientLayer: #dragon.vm.caffe.layers.common.StopGradientLayer
.. _ProposalLayer: #dragon.vm.caffe.layers.common.ProposalLayer
.. _SoftmaxWithLossLayer: #dragon.vm.caffe.layers.loss.SoftmaxWithLossLayer
......
......@@ -16,6 +16,7 @@ List Brief
`Solver.net`_        Return the train net.
`Solver.test_nets`_  Return the test nets.
`Solver.iter`_       Return or set the current iteration.
`Solver.lr`_         Return or set the current learning rate.
==================== =============================================================================
API Reference
......@@ -36,6 +37,7 @@ API Reference
.. _Solver.net: #dragon.vm.caffe.solver.Solver.net
.. _Solver.test_nets: #dragon.vm.caffe.solver.Solver.test_nets
.. _Solver.iter: #dragon.vm.caffe.solver.Solver.iter
.. _Solver.lr: #dragon.vm.caffe.solver.Solver.lr
.. _[LeCun et.al, 1998]: http://yann.lecun.com/exdb/publis/#lecun-98b
.. _[Sutskever et.al, 2012]: http://www.cs.utoronto.ca/~bonner/courses/2016s/csc321/lectures/lec6.pdf
......
......@@ -57,5 +57,6 @@ from .common import InnerProductLayer, \
                    TileLayer, \
                    ReductionLayer, \
                    ExpandDimsLayer, \
                    StopGradientLayer, \
                    ProposalLayer, \
                    DenseConcatLayer
\ No newline at end of file
......@@ -618,6 +618,20 @@ class ExpandDimsLayer(Layer):
        return ops.ExpandDims(input, **self._param)


class StopGradientLayer(Layer):
    """
    The implementation of ``StopGradientLayer``.
    """
    def __init__(self, LayerParameter):
        super(StopGradientLayer, self).__init__(LayerParameter)

    def Setup(self, bottom):
        super(StopGradientLayer, self).Setup(bottom)
        # A single bottom may arrive as a one-element list
        input = bottom[0] if isinstance(bottom, list) else bottom
        return ops.StopGradient(input, **self._param)
class ProposalLayer(Layer):
    """The implementation of ``ProposalLayer``.
......
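At the ops level the new layer is a thin wrapper: ``Setup`` unwraps a single-element bottom list and forwards it to ``ops.StopGradient``. A minimal sketch of the equivalent direct call; the ``Tensor('x').Variable()`` construction and the import paths are assumptions about Dragon's core API, only ``ops.StopGradient`` itself is confirmed by this commit:

    import dragon.ops as ops
    from dragon.core.tensor import Tensor  # import path assumed

    x = Tensor('x').Variable()  # tensor-creation API assumed
    # Forward value follows x; no gradient flows back through y.
    y = ops.StopGradient(x)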
......@@ -399,6 +399,26 @@ class Solver(object):
    def iter(self, value):
        self._iter = value

    @property
    def lr(self):
        """Return or set the current learning rate. [**Extended**]

        Parameters
        ----------
        value : float
            The learning rate to set.

        Returns
        -------
        float
            The current learning rate.

        """
        return self._optimizer.lr

    @lr.setter
    def lr(self, value):
        self._optimizer.lr = value
class SGDSolver(Solver):
    """The Momentum-SGD Solver, introduced by `[LeCun et.al, 1998]`_.
......
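The new ``lr`` property simply proxies ``self._optimizer.lr``, so the learning rate can be read or overridden between steps, e.g. for a hand-rolled schedule. A minimal usage sketch; importing ``SGDSolver`` from ``dragon.vm.caffe`` and the ``solver.prototxt`` path are both assumptions, not shown in this diff:

    from dragon.vm.caffe import SGDSolver  # import path assumed

    solver = SGDSolver('solver.prototxt')  # placeholder path
    base_lr = solver.lr                    # read the optimizer's current rate
    solver.lr = base_lr * 0.1              # override it directly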
......@@ -36,7 +36,7 @@ find_packages('dragon')
find_modules()
setup(name = 'dragon',
version='0.2.1.10',
version='0.2.1.11',
description = 'Dragon: A Computation Graph Virtual Machine Based Deep Learning Framework',
url='https://github.com/neopenx/Dragon',
author='Ting Pan',
......
......@@ -61,15 +61,13 @@ OPERATOR_SCHEMA(GradientGather).NumOutputs(1);
NO_GRADIENT(GradientGather);
template <class Context>
void StopGradientOp<Context>::RunOnDevice() {
    ws()->CreateAvatar(output(0), &input(0));
}
void StopGradientOp<Context>::RunOnDevice() {}
DEPLOY_CPU(StopGradient);
#ifdef WITH_CUDA
DEPLOY_CUDA(StopGradient);
#endif
OPERATOR_SCHEMA(StopGradient).NumInputs(1).NumOutputs(1);
OPERATOR_SCHEMA(StopGradient).NumInputs(1).NumOutputs(1).Inplace({ { 0, 0 } });
NO_GRADIENT(StopGradient);
} // namespace dragon
} // namespace dragon
\ No newline at end of file
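The crash fix itself: the old ``RunOnDevice`` called ``ws()->CreateAvatar(output(0), &input(0))`` to force output/input memory sharing at run time, which is the sharing the commit message blames for the crash. The new code makes ``RunOnDevice`` a no-op and instead declares the aliasing in the operator schema via ``Inplace({ { 0, 0 } })``, so the graph optimizer decides when output(0) may reuse input(0)'s buffer. Together with ``NO_GRADIENT(StopGradient)``, the op becomes a (potentially zero-copy) forward pass-through that cuts backprop. A sketch of that contract, with the same assumed Python API as above:

    import dragon.ops as ops
    from dragon.core.tensor import Tensor  # import path assumed

    x = Tensor('x').Variable()  # tensor-creation API assumed
    y = ops.StopGradient(x)     # forward: y may alias x (schema Inplace)
                                # backward: no gradient op is generated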