Skip to content
This repository has been archived by the owner on Nov 17, 2023. It is now read-only.

Commit

Permalink
Finish implementation and fix a bug in 'take'
Browse files Browse the repository at this point in the history
  • Loading branch information
hgt312 committed Oct 21, 2019
1 parent e51d1fd commit b30f423
Show file tree
Hide file tree
Showing 5 changed files with 205 additions and 11 deletions.
64 changes: 64 additions & 0 deletions python/mxnet/ndarray/numpy/_op.py
Original file line number Diff line number Diff line change
Expand Up @@ -4765,9 +4765,73 @@ def einsum(*operands, **kwargs):

@set_module('mxnet.ndarray.numpy')
def shares_memory(a, b, max_work=None):
    """
    Determine if two arrays share memory.

    Parameters
    ----------
    a, b : ndarray
        Input arrays
    max_work : int, optional
        Ignored; kept only for NumPy API compatibility.

    Returns
    -------
    out : bool

    See Also
    --------
    may_share_memory

    Examples
    --------
    >>> np.shares_memory(np.array([1,2]), np.array([5,8,9]))
    False

    This function differs from the original `numpy.shares_memory
    <https://docs.scipy.org/doc/numpy/reference/generated/numpy.shares_memory.html>`_ in
    the following way(s):

    - Does not support `max_work`, it is a dummy argument
    - Actually it is same as `may_share_memory` in MXNet DeepNumPy
    """
    # The backend operator returns a boolean ndarray; `.item()` converts it
    # to a Python bool on the caller's side.
    return _npi.share_memory(a, b).item()


@set_module('mxnet.ndarray.numpy')
def may_share_memory(a, b, max_work=None):
    """
    Determine if two arrays might share memory.

    A return of True does not necessarily mean that the two arrays
    share any element. It just means that they *might*.

    Only the memory bounds of a and b are checked by default.

    Parameters
    ----------
    a, b : ndarray
        Input arrays
    max_work : int, optional
        Ignored; kept only for NumPy API compatibility.

    Returns
    -------
    out : bool

    See Also
    --------
    shares_memory

    Examples
    --------
    >>> np.may_share_memory(np.array([1,2]), np.array([5,8,9]))
    False
    >>> x = np.zeros([3, 4])
    >>> np.may_share_memory(x[:,0], x[:,1])
    True

    This function differs from the original `numpy.may_share_memory
    <https://docs.scipy.org/doc/numpy/reference/generated/numpy.may_share_memory.html>`_ in
    the following way(s):

    - Does not support `max_work`, it is a dummy argument
    - Actually it is same as `shares_memory` in MXNet DeepNumPy
    """
    # Backed by the same `_npi.share_memory` operator as `shares_memory`;
    # `.item()` converts the boolean ndarray result to a Python bool.
    return _npi.share_memory(a, b).item()
66 changes: 65 additions & 1 deletion python/mxnet/numpy/multiarray.py
Original file line number Diff line number Diff line change
Expand Up @@ -1320,7 +1320,7 @@ def take(self, indices, axis=None, mode='raise'): # pylint: disable=arguments-d
The arguments are the same as for :py:func:`take`, with
this array as data.
"""
take(self, indices, axis, mode=mode)
return take(self, indices, axis, mode=mode)

def one_hot(self, *args, **kwargs):
"""Convenience fluent method for :py:func:`one_hot`.
Expand Down Expand Up @@ -6424,9 +6424,73 @@ def einsum(*operands, **kwargs):

@set_module('mxnet.numpy')
def shares_memory(a, b, max_work=None):
    """
    Determine if two arrays share memory.

    Parameters
    ----------
    a, b : ndarray
        Input arrays
    max_work : int, optional
        Ignored; kept only for NumPy API compatibility.

    Returns
    -------
    out : bool

    See Also
    --------
    may_share_memory

    Examples
    --------
    >>> np.shares_memory(np.array([1,2]), np.array([5,8,9]))
    False

    This function differs from the original `numpy.shares_memory
    <https://docs.scipy.org/doc/numpy/reference/generated/numpy.shares_memory.html>`_ in
    the following way(s):

    - Does not support `max_work`, it is a dummy argument
    - Actually it is same as `may_share_memory` in MXNet DeepNumPy
    """
    # Thin front-end: delegate to the imperative ndarray implementation.
    return _mx_nd_np.shares_memory(a, b, max_work)


@set_module('mxnet.numpy')
def may_share_memory(a, b, max_work=None):
    """
    Determine if two arrays might share memory.

    A return of True does not necessarily mean that the two arrays
    share any element. It just means that they *might*.

    Only the memory bounds of a and b are checked by default.

    Parameters
    ----------
    a, b : ndarray
        Input arrays
    max_work : int, optional
        Ignored; kept only for NumPy API compatibility.

    Returns
    -------
    out : bool

    See Also
    --------
    shares_memory

    Examples
    --------
    >>> np.may_share_memory(np.array([1,2]), np.array([5,8,9]))
    False
    >>> x = np.zeros([3, 4])
    >>> np.may_share_memory(x[:,0], x[:,1])
    True

    This function differs from the original `numpy.may_share_memory
    <https://docs.scipy.org/doc/numpy/reference/generated/numpy.may_share_memory.html>`_ in
    the following way(s):

    - Does not support `max_work`, it is a dummy argument
    - Actually it is same as `shares_memory` in MXNet DeepNumPy
    """
    # Thin front-end: delegate to the imperative ndarray implementation.
    return _mx_nd_np.may_share_memory(a, b, max_work)
41 changes: 40 additions & 1 deletion python/mxnet/symbol/numpy/_symbol.py
Original file line number Diff line number Diff line change
Expand Up @@ -40,7 +40,7 @@
'std', 'var', 'indices', 'copysign', 'ravel', 'hanning', 'hamming', 'blackman', 'flip',
'around', 'hypot', 'rad2deg', 'deg2rad', 'unique', 'lcm', 'tril', 'identity', 'take',
'ldexp', 'vdot', 'inner', 'outer', 'equal', 'not_equal', 'greater', 'less', 'greater_equal',
'less_equal', 'hsplit', 'rot90', 'einsum', 'true_divide']
'less_equal', 'hsplit', 'rot90', 'einsum', 'true_divide', 'shares_memory', 'may_share_memory']


def _num_outputs(sym):
Expand Down Expand Up @@ -4555,4 +4555,43 @@ def einsum(*operands, **kwargs):
return _npi.einsum(*operands, subscripts=subscripts, out=out, optimize=int(optimize_arg))


@set_module('mxnet.symbol.numpy')
def shares_memory(a, b, max_work=None):
    """
    Determine if two arrays share memory.

    Parameters
    ----------
    a, b : _Symbol
        Input arrays
    max_work : int, optional
        Ignored; kept only for NumPy API compatibility.

    Returns
    -------
    out : _Symbol
        Symbolic result; evaluates to a boolean when executed.
    """
    return _npi.share_memory(a, b)


@set_module('mxnet.symbol.numpy')
def may_share_memory(a, b, max_work=None):
    """
    Determine if two arrays might share memory.

    A return of True does not necessarily mean that the two arrays
    share any element. It just means that they *might*.

    Only the memory bounds of a and b are checked by default.

    Parameters
    ----------
    a, b : _Symbol
        Input arrays
    max_work : int, optional
        Ignored; kept only for NumPy API compatibility.

    Returns
    -------
    out : _Symbol
        Symbolic result; evaluates to a boolean when executed.
    """
    # Same backend operator as `shares_memory`: only memory bounds are checked.
    return _npi.share_memory(a, b)


_set_np_symbol_class(_Symbol)
22 changes: 13 additions & 9 deletions src/operator/numpy/np_memory_op.h
Original file line number Diff line number Diff line change
Expand Up @@ -51,15 +51,19 @@ void NumpyShareMemoryCompute(const nnvm::NodeAttrs& attrs,
*(outdata.dptr<bool>()) = false;
return;
}
uint64_t start1 = reinterpret_cast<uint64_t>(a.dptr_);
uint64_t end1 = start1 + a.Size();
uint64_t start2 = reinterpret_cast<uint64_t>(b.dptr_);
uint64_t end2 = start2 + b.Size();
if (!(start1 < end2 && start2 < end1 && start1 < end1 && start2 < end2)) {
*(outdata.dptr<bool>()) = false;
} else {
*(outdata.dptr<bool>()) = true;
}
MSHADOW_TYPE_SWITCH_WITH_BOOL(a.type_flag_, AType, {
MSHADOW_TYPE_SWITCH_WITH_BOOL(b.type_flag_, BType, {
uint64_t start1 = reinterpret_cast<uint64_t>(a.dptr_);
uint64_t end1 = start1 + a.Size() * sizeof(AType);
uint64_t start2 = reinterpret_cast<uint64_t>(b.dptr_);
uint64_t end2 = start2 + b.Size() * sizeof(BType);
if (!(start1 < end2 && start2 < end1 && start1 < end1 && start2 < end2)) {
*(outdata.dptr<bool>()) = false;
} else {
*(outdata.dptr<bool>()) = true;
}
});
});
return;
}

Expand Down
23 changes: 23 additions & 0 deletions tests/python/unittest/test_numpy_op.py
Original file line number Diff line number Diff line change
Expand Up @@ -3542,6 +3542,29 @@ def test_np_true_divide():
assert_almost_equal(out_mx.asnumpy(), out_np, rtol=1e-3, atol=1e-3, use_broadcast=False)


@with_seed()
@use_np
def test_np_share_memory():
    # `shares_memory` and `may_share_memory` are backed by the same operator
    # in MXNet, so both are exercised with the same cases.
    ops = [np.shares_memory, np.may_share_memory]
    # reshape does not support boolean types
    dtypes = [np.int8, np.uint8, np.int32, np.int64, np.float16, np.float32, np.float64]
    for op in ops:
        for dt in dtypes:
            x = np.zeros([13, 21, 23, 22], dtype=dt)
            # Slices along the leading axis occupy disjoint address ranges.
            assert not op(x[0,:,:,:], x[1,:,:,:])
            assert not op(x[2,:,:,:], x[3,:,:,:])
            # NOTE(review): the expected False here relies on these indexing
            # expressions producing copies rather than views — confirm against
            # MXNet ndarray indexing semantics.
            assert not op(x[2:5,0,0,0], x[3:4,0,0,0])
            assert not op(x[2:5,0,0,0], x[4:7,0,0,0])
            # Overlapping / non-overlapping ranges within the last axis.
            assert op(x[0,0,0,2:5], x[0,0,0,3:4])
            assert op(x[0,6,0,2:5], x[0,6,0,4:7])
            assert not op(x[0,5,0,2:5], x[0,6,0,4:7])

            # Zero-size arrays never share memory with anything,
            # regardless of dtype on either side.
            for adt in dtypes:
                assert not op(x, np.ones((5, 0), dtype=adt))
                assert not op(np.ones((5, 0), dtype=adt), x)
                assert not op(np.ones((5, 0), dtype=dt), np.ones((0, 3, 0), dtype=adt))


if __name__ == '__main__':
import nose
nose.runmodule()

0 comments on commit b30f423

Please sign in to comment.