Skip to content
Open
Show file tree
Hide file tree
Changes from 1 commit
Commits
Show all changes
43 commits
Select commit Hold shift + click to select a range
3a8c699
naive implement
WintersMontagne10335 Dec 1, 2025
bcbeb54
update
WintersMontagne10335 Dec 1, 2025
1f92a46
Merge remote-tracking branch 'upstream/develop' into hackathon9th06planb
WintersMontagne10335 Dec 1, 2025
cb487f9
update
WintersMontagne10335 Dec 1, 2025
f582b9f
update
WintersMontagne10335 Dec 2, 2025
b63d45d
Merge remote-tracking branch 'upstream/develop' into hackathon9th06planb
WintersMontagne10335 Dec 2, 2025
d2cf355
update
WintersMontagne10335 Dec 2, 2025
5f77217
Merge remote-tracking branch 'upstream/develop' into hackathon9th06planb
WintersMontagne10335 Dec 2, 2025
5096c40
Merge remote-tracking branch 'upstream/develop' into hackathon9th06planb
WintersMontagne10335 Dec 5, 2025
ff24ce3
update
WintersMontagne10335 Dec 5, 2025
e15c147
Merge remote-tracking branch 'upstream/develop' into hackathon9th06planb
WintersMontagne10335 Dec 5, 2025
32c16f6
update
WintersMontagne10335 Dec 6, 2025
4fbc263
Merge remote-tracking branch 'upstream/develop' into hackathon9th06planb
WintersMontagne10335 Dec 6, 2025
1953589
update
WintersMontagne10335 Dec 6, 2025
9a2de36
update
WintersMontagne10335 Dec 6, 2025
f8e0db9
Merge remote-tracking branch 'upstream/develop' into hackathon9th06planb
WintersMontagne10335 Dec 8, 2025
1a41726
Merge remote-tracking branch 'upstream/develop' into hackathon9th06planb
WintersMontagne10335 Dec 9, 2025
4fe497c
Merge remote-tracking branch 'upstream/develop' into hackathon9th06planb
WintersMontagne10335 Dec 9, 2025
98e5b17
update
WintersMontagne10335 Dec 9, 2025
967b3bc
update
WintersMontagne10335 Dec 9, 2025
d54b205
Merge remote-tracking branch 'upstream/develop' into hackathon9th06planb
WintersMontagne10335 Dec 9, 2025
2133a77
update
WintersMontagne10335 Dec 9, 2025
37ea197
update
WintersMontagne10335 Dec 9, 2025
d694f46
update
WintersMontagne10335 Dec 10, 2025
8970a99
Merge remote-tracking branch 'upstream/develop' into hackathon9th06planb
WintersMontagne10335 Dec 10, 2025
351773b
update
WintersMontagne10335 Dec 10, 2025
5a6f478
Merge remote-tracking branch 'upstream/develop' into hackathon9th06planb
WintersMontagne10335 Dec 11, 2025
187670b
update
WintersMontagne10335 Dec 11, 2025
4dd58d8
Merge remote-tracking branch 'upstream/develop' into hackathon9th06planb
WintersMontagne10335 Dec 11, 2025
502d99f
Merge remote-tracking branch 'upstream/develop' into hackathon9th06planb
WintersMontagne10335 Dec 12, 2025
4ae3219
Merge remote-tracking branch 'upstream/develop' into hackathon9th06planb
WintersMontagne10335 Dec 12, 2025
b5e0d06
update for ci dcu
WintersMontagne10335 Dec 13, 2025
a2ad576
Merge remote-tracking branch 'upstream/develop' into hackathon9th06planb
WintersMontagne10335 Dec 13, 2025
1d818f5
update cmakelists
WintersMontagne10335 Dec 14, 2025
254ee30
Merge remote-tracking branch 'upstream/develop' into hackathon9th06planb
WintersMontagne10335 Dec 14, 2025
715e508
update
WintersMontagne10335 Dec 14, 2025
48601cd
Merge remote-tracking branch 'upstream/develop' into hackathon9th06planb
WintersMontagne10335 Dec 15, 2025
b2bd396
Sink the judgment logic down to the C++ layer
WintersMontagne10335 Dec 15, 2025
22a69c6
update
WintersMontagne10335 Dec 15, 2025
719e8a9
update
WintersMontagne10335 Dec 15, 2025
0898713
update op_build_gen
WintersMontagne10335 Dec 18, 2025
b51d436
Merge remote-tracking branch 'upstream/develop' into hackathon9th06planb
WintersMontagne10335 Dec 18, 2025
53590b5
Merge remote-tracking branch 'upstream/develop' into hackathon9th06planb
WintersMontagne10335 Dec 19, 2025
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Prev Previous commit
Next Next commit
update
  • Loading branch information
WintersMontagne10335 committed Dec 1, 2025
commit bcbeb54125e24d45aeeb2793bf302fdfaa6a27ab
19 changes: 19 additions & 0 deletions paddle/phi/ops/yaml/op_compat.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -2541,6 +2541,25 @@
max_grad : GetReduceGradExpectedKernelType
manual_signature : [max]

- op : max_pool2d_with_dilations
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

这个是一个新增op,需要修改op_compat.yaml吗

backward : max_pool2d_with_dilations_grad
inputs :
{x : X}
outputs :
{out : Out}
attrs :
{kernel_size : ksize}
int_array:
kernel_size :
data_type : int
support_tensor : true
get_expected_kernel_type :
pool2d : GetPoolExpectedKernelType
max_pool2d_with_dilations_grad : GetPoolExpectedKernelType
extra :
attrs : [bool use_mkldnn = false, bool use_onednn = false, bool use_quantizer = false,
str mkldnn_data_type = "float32", str onednn_data_type = "", bool is_test = false]

- op : max_pool2d_with_index
inputs :
{x : X}
Expand Down
72 changes: 70 additions & 2 deletions test/legacy_test/test_pool2d_op.py
Original file line number Diff line number Diff line change
Expand Up @@ -739,7 +739,7 @@ def lp_pool2d_wrapper(

class TestPool2D_Op_Mixin:
def setUp(self):
self.op_type = "pool2d"
self.op_type = "max_pool2d_with_dilations"
self.use_cudnn = False
self.init_kernel_type()
self.use_onednn = False
Expand Down Expand Up @@ -1849,6 +1849,74 @@ class TestPool2D_Op(TestPool2D_Op_Mixin, OpTest):
class TestPool2D_Max_Dilation(TestPool2D_Op):
"""Basic NCHW dilation test"""

def setUp(self):
    """Build the max_pool2d_with_dilations OpTest fixture.

    Initializes all test-case knobs via the ``init_*`` hooks (which
    subclasses override), generates a random input, computes the
    reference output with the naive NumPy pooling implementation, and
    fills ``self.inputs`` / ``self.outputs`` / ``self.attrs`` for the
    OpTest machinery.
    """
    self.op_type = "max_pool2d_with_dilations"
    self.use_cudnn = False
    # Fix: the original called init_kernel_type() twice (here and again
    # after init_dilations()); one call is sufficient.
    self.init_kernel_type()
    self.use_onednn = False
    self.init_data_type()
    self.init_test_case()
    self.padding_algorithm = "EXPLICIT"
    self.init_paddings()
    self.init_dilations()
    self.init_global_pool()
    self.init_pool_type()
    self.init_ceil_mode()
    self.init_exclusive()
    self.init_adaptive()
    self.init_data_format()
    self.init_shape()

    # bfloat16 ops generate the reference data in float32 and convert
    # to uint16 storage afterwards (see below).
    if self.is_bfloat16_op():
        input = np.random.random(self.shape).astype(np.float32)
    else:
        input = np.random.random(self.shape).astype(self.dtype)

    # Reference result from the naive NumPy pooling implementation.
    output = pool2D_forward_naive(
        input,
        self.ksize,
        self.strides,
        self.paddings,
        self.dilations,
        self.global_pool,
        self.ceil_mode,
        self.exclusive,
        self.adaptive,
        self.data_format,
        self.pool_type,
        self.padding_algorithm,
    )

    if self.is_bfloat16_op():
        output = convert_float_to_uint16(output)
        self.inputs = {'X': convert_float_to_uint16(input)}
    else:
        output = output.astype(self.dtype)
        self.inputs = {'X': OpTest.np_dtype_to_base_dtype(input)}

    self.outputs = {'Out': output}

    self.attrs = {
        'strides': self.strides,
        'paddings': self.paddings,
        'dilations': self.dilations,
        'ksize': self.ksize,
        'pooling_type': self.pool_type,
        'global_pooling': self.global_pool,
        'use_cudnn': self.use_cudnn,
        'use_onednn': self.use_onednn,
        'ceil_mode': self.ceil_mode,
        'data_format': self.data_format,
        'exclusive': self.exclusive,
        'adaptive': self.adaptive,
        "padding_algorithm": self.padding_algorithm,
    }
    # Pick the python wrapper matching the cudnn flag set above.
    if self.use_cudnn:
        self.python_api = pool2d_wrapper_use_cudnn_with_dilations
    else:
        self.python_api = pool2d_wrapper_not_use_cudnn_with_dilations

def init_pool_type(self):
    """Select max pooling and its matching naive reference function."""
    self.pool2D_forward_naive = max_pool2d_with_dilations_forward_naive
    self.pool_type = "max"
Expand All @@ -1861,7 +1929,7 @@ def init_test_case(self):
self.strides = [1, 1]

def init_paddings(self):
self.paddings = [1, 1]
self.paddings = [0, 0]

def init_global_pool(self):
self.global_pool = False
Expand Down