Commit 670a975

gchanan authored and soumith committed
explicit Ptr constructors, fast transposed copy.
1 parent 263ff57 · commit 670a975

6 files changed: +14, -14 lines changed
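
Note: every hunk below makes the same kind of change. A guard that used to be copy-initialized from a freshly allocated TH object (THTensorPtr guard = THTensor_(new)(...);) is now direct-initialized (THTensorPtr guard(THTensor_(new)(...));), which is the form an explicit converting constructor requires. A minimal self-contained sketch of the difference, using a hypothetical Guard class and FakeTensor API rather than the real THPPointer/THTensor:

#include <cstdio>
#include <cstdlib>

// Hypothetical stand-ins for a TH-style C object and its allocator (not the real THTensor API).
struct FakeTensor { int refcount; };
FakeTensor *FakeTensor_new() { return (FakeTensor *)std::calloc(1, sizeof(FakeTensor)); }
void FakeTensor_free(FakeTensor *t) { std::free(t); }

// Hypothetical stand-in for THPPointer: owns the raw pointer and frees it on scope exit.
class Guard {
public:
  explicit Guard(FakeTensor *ptr) : ptr_(ptr) {}  // 'explicit' rules out implicit conversions
  ~Guard() { if (ptr_) FakeTensor_free(ptr_); }
  FakeTensor *get() const { return ptr_; }
private:
  Guard(const Guard &);             // non-copyable
  Guard &operator=(const Guard &);  // non-assignable
  FakeTensor *ptr_;
};

int main() {
  // Guard g = FakeTensor_new();    // copy-initialization: rejected once the constructor is explicit
  Guard g(FakeTensor_new());        // direct-initialization: compiles, guard owns the object
  std::printf("guard holds %p\n", (void *)g.get());
  return 0;                         // FakeTensor_free runs when g goes out of scope
}

Presumably the benefit of the explicit constructors is that a raw TH pointer can no longer turn into an owning guard through an implicit conversion; every transfer of ownership has to be spelled out at the construction site, which is exactly what the hunks below do.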

tools/cwrap/plugins/Broadcast.py

Lines changed: 6 additions & 6 deletions
@@ -47,16 +47,16 @@ class Broadcast(CWrapPlugin):
     def getPreArgStringTemplate(self, type=None):
         if type == None:
             ret = """THTensor *${arg_op_other}_save = ${arg_op_other};
-                      THTensorPtr ${arg_op_other}_guard = THTensor_(new)(LIBRARY_STATE_NOARGS);\n"""
+                      THTensorPtr ${arg_op_other}_guard(THTensor_(new)(LIBRARY_STATE_NOARGS));\n"""
         else:
             tensor_type = "TH" + type + "Tensor"
             cuda_tensor_type = "THCuda" + type + "Tensor"
             ret = ("#if !IS_CUDA\n" +
                    tensor_type + " *${arg_op_other}_save = ${arg_op_other};\n" +
-                   tensor_type + "Ptr ${arg_op_other}_guard = " + tensor_type + "_new(LIBRARY_STATE_NOARGS);\n" +
+                   tensor_type + "Ptr ${arg_op_other}_guard(" + tensor_type + "_new(LIBRARY_STATE_NOARGS));\n" +
                    "#else\n" +
                    cuda_tensor_type + " *${arg_op_other}_save = ${arg_op_other};\n" +
-                   "THPPointer<" + cuda_tensor_type + "> ${arg_op_other}_guard = " + cuda_tensor_type + "_new(LIBRARY_STATE_NOARGS);\n" +
+                   "THPPointer<" + cuda_tensor_type + "> ${arg_op_other}_guard (" + cuda_tensor_type + "_new(LIBRARY_STATE_NOARGS));\n" +
                    "#endif\n")
         return Template(ret)
 
@@ -85,13 +85,13 @@ def getPreArgStringTemplate(self, type=None):
                long ${arg_op_a}_dim${idx}_size = THTensor_(size)(LIBRARY_STATE ${arg_op_dim}, ${arg_op_dim_value});\n""")
 
     OUT_PLACE_PRE_EXPAND1_DIM_TEMPLATE = Template(
-        """THLongStoragePtr ${arg_op_a}_storage = THLongStorage_newWithSize1(${arg_op_a}_dim0_size);\n""")
+        """THLongStoragePtr ${arg_op_a}_storage(THLongStorage_newWithSize1(${arg_op_a}_dim0_size));\n""")
 
     OUT_PLACE_PRE_EXPAND2_DIM_TEMPLATE = Template(
-        """THLongStoragePtr ${arg_op_a}_storage = THLongStorage_newWithSize2(${arg_op_a}_dim0_size, ${arg_op_a}_dim1_size);\n""")
+        """THLongStoragePtr ${arg_op_a}_storage(THLongStorage_newWithSize2(${arg_op_a}_dim0_size, ${arg_op_a}_dim1_size));\n""")
 
     OUT_PLACE_PRE_EXPAND3_DIM_TEMPLATE = Template(
-        """THLongStoragePtr ${arg_op_a}_storage = THLongStorage_newWithSize3(${arg_op_a}_dim0_size, ${arg_op_a}_dim1_size, ${arg_op_a}_dim2_size);\n""")
+        """THLongStoragePtr ${arg_op_a}_storage(THLongStorage_newWithSize3(${arg_op_a}_dim0_size, ${arg_op_a}_dim1_size, ${arg_op_a}_dim2_size));\n""")
 
     OUT_PLACE_PRE_EXPAND_POST_DIM_TEMPLATE = Template(
         """if (!THTensor_(expand)(LIBRARY_STATE ${arg_op_a}_guard.get(), ${arg_op_a}, ${arg_op_a}_storage, ${raise_errors})) {

torch/csrc/Module.cpp

Lines changed: 1 addition & 1 deletion
@@ -482,7 +482,7 @@ PyObject *THPModule_inferSize(PyObject *_unused, PyObject *args)
   THLongStorage *size1 = size1_guard.get();
   THLongStoragePtr size2_guard = THPUtils_unpackSize(arg2);
   THLongStorage *size2 = size2_guard.get();
-  THLongStoragePtr sizes_guard = THLongStorage_new();
+  THLongStoragePtr sizes_guard(THLongStorage_new());
   THLongStorage *sizes = sizes_guard.get();
 
   THLongStorage_inferSize2(sizes, size1->data, size1->size, size2->data, size2->size, 1);

torch/csrc/copy_utils.h

Lines changed: 1 addition & 1 deletion
@@ -96,7 +96,7 @@ void THPInsertTensorCopyFunction(
   TensorSrc* src = THPTypeInfo<TensorSrc>::cdata(src_);
 
   TensorSrc *src_save = src;
-  THPPointer<TensorSrc> src_guard = newForExpand<TensorSrc>(LIBRARY_STATE_NOARGS);
+  THPPointer<TensorSrc> src_guard(newForExpand<TensorSrc>(LIBRARY_STATE_NOARGS));
 
   int ret = expand_inplace1<TensorSrc, TensorDst>(LIBRARY_STATE src_guard.get(), src, dst, "src", "dst", true);
   if (ret == 0) {

torch/csrc/expand_utils.h

Lines changed: 2 additions & 2 deletions
@@ -59,7 +59,7 @@ int expand_inplace1(LIBRARY_STATE_TYPE ExpandType *r, ExpandType *to_expand, Ten
   ptrdiff_t to_expand_nElem = THSize_nElement(to_expand->nDimension, to_expand->size);
   ptrdiff_t tensor_nElem = THSize_nElement(tensor->nDimension, tensor->size);
   bool to_expand_raise = !fallback || (to_expand_nElem != tensor_nElem);
-  THLongStoragePtr tensor_size = THLongStorage_newWithSize(tensor->nDimension);
+  THLongStoragePtr tensor_size(THLongStorage_newWithSize(tensor->nDimension));
   THLongStorage_rawCopy(tensor_size.get(), tensor->size);
 
   int ret = expand_inplace(LIBRARY_STATE r, to_expand, tensor, to_expand_name, tensor_name, fallback,
@@ -80,7 +80,7 @@ int expand_inplace2(LIBRARY_STATE_TYPE TensorType *r1, TensorType *r2,
   ptrdiff_t to_expand2_nElem = THSize_nElement(to_expand2->nDimension, to_expand2->size);
   bool to_expand1_raise = !fallback || (tensor_nElem != to_expand1_nElem);
   bool to_expand2_raise = !fallback || (tensor_nElem != to_expand2_nElem);
-  THLongStoragePtr tensor_size = THLongStorage_newWithSize(tensor->nDimension);
+  THLongStoragePtr tensor_size(THLongStorage_newWithSize(tensor->nDimension));
   THLongStorage_rawCopy(tensor_size.get(), tensor->size);
 
   int ret = expand_inplace(LIBRARY_STATE r1, to_expand1, tensor, to_expand1_name, tensor_name, fallback,
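
As an aside, the tensor_size lines above show the guard idiom these explicit constructors support: allocate with the C API, wrap the result in a guard immediately, and hand guard.get() to callees that expect the raw pointer, so the storage is released on every exit path. A rough self-contained sketch of that pattern with stand-in types (the real THLongStorage API is not reproduced here):

#include <cstdio>
#include <cstdlib>

// Stand-ins for THLongStorage and its C allocator/free; names are illustrative only.
struct FakeStorage { long *data; int size; };
FakeStorage *FakeStorage_newWithSize(int n) {
  FakeStorage *s = (FakeStorage *)std::malloc(sizeof(FakeStorage));
  s->data = (long *)std::calloc(n, sizeof(long));
  s->size = n;
  return s;
}
void FakeStorage_free(FakeStorage *s) { std::free(s->data); std::free(s); }

// Minimal guard with an explicit constructor, mirroring how the *_guard / tensor_size variables are used.
class StorageGuard {
public:
  explicit StorageGuard(FakeStorage *s) : s_(s) {}
  ~StorageGuard() { if (s_) FakeStorage_free(s_); }
  FakeStorage *get() const { return s_; }
private:
  FakeStorage *s_;
};

// A callee that, like THLongStorage_rawCopy, takes the raw pointer rather than the guard.
void fill(FakeStorage *s, long value) {
  for (int i = 0; i < s->size; ++i) s->data[i] = value;
}

int expand_like(int nDimension) {
  StorageGuard tensor_size(FakeStorage_newWithSize(nDimension));  // direct-initialization of the guard
  fill(tensor_size.get(), 1);                                     // raw pointer handed out via .get()
  if (tensor_size.get()->data[0] != 1) return -1;                 // early return: guard still frees the storage
  std::printf("first element: %ld\n", tensor_size.get()->data[0]);
  return 0;                                                       // storage freed when the guard leaves scope
}

int main() { return expand_like(3); }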

torch/csrc/generic/Tensor.cpp

Lines changed: 1 addition & 1 deletion
@@ -827,7 +827,7 @@ void THPTensor_(initCopyMethods)()
 {
   auto& h = THTensor_(copy_functions);
   // copy from same type
-  THPInsertCopyFunction(h, &THTensor_(copy));
+  THPInsertTensorCopyFunction(h, &THTensor_(copy));
   // copy from CPU types
   THPInsertTensorCopyFunction(h, &THTensor_(copyByte));
   THPInsertTensorCopyFunction(h, &THTensor_(copyChar));

torch/csrc/generic/methods/TensorApply.cwrap

Lines changed: 3 additions & 3 deletions
@@ -59,7 +59,7 @@ static PyObject * THPTensor_(map)(THPTensor *self, PyObject *args)
   THTensor *src = src_object->cdata;
 
   THTensor *src_save = src;
-  THTensorPtr src_guard = THTensor_(new)(LIBRARY_STATE_NOARGS);
+  THTensorPtr src_guard(THTensor_(new)(LIBRARY_STATE_NOARGS));
 
   int ret = expand_inplace1<THTensor, THTensor>(src_guard.get(), src, tensor, "src", "tensor", true);
   if (ret == 0) {
@@ -108,9 +108,9 @@ static PyObject * THPTensor_(map2)(THPTensor *self, PyObject *args)
   THTensor *src2 = src2_object->cdata;
 
   THTensor *src1_save = src1;
-  THTensorPtr src1_guard = THTensor_(new)(LIBRARY_STATE_NOARGS);
+  THTensorPtr src1_guard(THTensor_(new)(LIBRARY_STATE_NOARGS));
   THTensor *src2_save = src2;
-  THTensorPtr src2_guard = THTensor_(new)(LIBRARY_STATE_NOARGS);
+  THTensorPtr src2_guard(THTensor_(new)(LIBRARY_STATE_NOARGS));
 
   int ret = expand_inplace2<THTensor>(src1_guard.get(), src2_guard.get(), src1, src2, tensor, "src1", "src2", "tensor", true);
   if (ret == 0) {
