@@ -205,12 +205,13 @@ struct XPURoundFunctor : public funcs::BaseActivationFunctor<T> {
                  const DenseTensor& x,
                  DenseTensor* out) const {
     using XPUType = typename XPUTypeTrait<T>::Type;
-    int r = xpu::round<XPUType>(dev_ctx.x_context(),
-                                reinterpret_cast<const XPUType*>(x.data<T>()),
-                                reinterpret_cast<XPUType*>(out->data<T>()),
-                                x.numel(),
-                                decimals);
-    PADDLE_ENFORCE_XDNN_SUCCESS(r, "round");
+    int r = xpu::paddle_round<XPUType>(
+        dev_ctx.x_context(),
+        reinterpret_cast<const XPUType*>(x.data<T>()),
+        reinterpret_cast<XPUType*>(out->data<T>()),
+        x.numel(),
+        decimals);
+    PADDLE_ENFORCE_XDNN_SUCCESS(r, "paddle_round");
   }
 };
 
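For reviewers, a minimal host-side sketch of the round-with-decimals semantics this functor forwards to `xpu::paddle_round`, assuming `decimals` scales by 10^decimals before rounding and that ties round half away from zero as with `std::round`; the helper `paddle_round_ref` is hypothetical and is not the XDNN implementation:

```cpp
#include <cmath>
#include <cstddef>

// Hypothetical reference for round-with-decimals semantics (not the XDNN
// kernel): scale by 10^decimals, round ties away from zero, scale back.
template <typename T>
void paddle_round_ref(const T* x, T* out, std::size_t n, int decimals) {
  const double scale = std::pow(10.0, decimals);
  for (std::size_t i = 0; i < n; ++i) {
    // e.g. decimals = 2: 1.2345 -> 123.45 -> 123.0 -> 1.23
    out[i] =
        static_cast<T>(std::round(static_cast<double>(x[i]) * scale) / scale);
  }
}
```

If XDNN instead rounds ties to even, only the exact half-way cases (e.g. 0.5, 2.5) would differ from this sketch.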
@@ -344,20 +345,20 @@ struct XPUSiluFunctor : public funcs::BaseActivationFunctor<T> {
     if (std::getenv("XPU_PADDLE_ACT_LUT") != nullptr) {
       if (!std::is_same<T, ::phi::dtype::bfloat16>::value) {
-        // use fast_swish if NOT bf16
-        int r = xpu::fast_swish(
+        // use fast_silu if NOT bf16
+        int r = xpu::fast_silu(
             xpu_context, x_data, y_data, x.numel(), nullptr, nullptr);
-        PADDLE_ENFORCE_XDNN_SUCCESS(r, "fast_swish");
+        PADDLE_ENFORCE_XDNN_SUCCESS(r, "fast_silu");
       } else {
-        // use plain swish
-        int r = xpu::swish(
-            xpu_context, x_data, y_data, x.numel(), nullptr, nullptr);
-        PADDLE_ENFORCE_XDNN_SUCCESS(r, "swish");
+        // use plain silu
+        int r =
+            xpu::silu(xpu_context, x_data, y_data, x.numel(), nullptr, nullptr);
+        PADDLE_ENFORCE_XDNN_SUCCESS(r, "silu");
       }
     } else {
-      // use plain swish
+      // use plain silu
       int r =
-          xpu::swish(xpu_context, x_data, y_data, x.numel(), nullptr, nullptr);
-      PADDLE_ENFORCE_XDNN_SUCCESS(r, "swish");
+          xpu::silu(xpu_context, x_data, y_data, x.numel(), nullptr, nullptr);
+      PADDLE_ENFORCE_XDNN_SUCCESS(r, "silu");
     }
   }
 };
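Why the rename is more than cosmetic: generic Swish computes x * sigmoid(beta * x), and SiLU is the beta = 1 special case, so the dedicated `silu`/`fast_silu` entry points make the intended activation explicit. A host-side reference sketch (the `*_ref` helpers are illustrative, not the XDNN kernels):

```cpp
#include <cmath>

// Generic Swish: x * sigmoid(beta * x) == x / (1 + exp(-beta * x)).
inline float swish_ref(float x, float beta) {
  return x / (1.0f + std::exp(-beta * x));
}

// SiLU is Swish with beta fixed at 1: x * sigmoid(x).
// e.g. silu_ref(1.0f) ~= 0.7311f.
inline float silu_ref(float x) { return swish_ref(x, 1.0f); }
```

With the `XPU_PADDLE_ACT_LUT` environment variable set, the functor above takes the `fast_silu` path except for bf16 inputs, which fall back to the plain `silu` kernel.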