Commit

delete bf16 of cross entropy (PaddlePaddle#53922)
* delete bf16 of cross entropy

* delete bf16 of cross entropy
ZhangDY-6483 committed May 19, 2023
1 parent eb193d8 commit 69d3f4e
Showing 3 changed files with 3 additions and 61 deletions.
3 changes: 1 addition & 2 deletions paddle/phi/kernels/gpu/cross_entropy_grad_kernel.cu
@@ -297,8 +297,7 @@ PD_REGISTER_KERNEL(cross_entropy_with_softmax_grad,
                    phi::CrossEntropyWithSoftmaxGradKernel,
                    float,
                    double,
-                   phi::dtype::float16,
-                   phi::dtype::bfloat16) {}
+                   phi::dtype::float16) {}
 #else
 PD_REGISTER_KERNEL(cross_entropy_with_softmax_grad,
                    GPU,
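For context (not part of the diff): the trailing arguments of PD_REGISTER_KERNEL list the dtypes the kernel is instantiated and registered for, so dropping phi::dtype::bfloat16 unregisters the bf16 GPU path for the gradient kernel; the forward kernel in the next file gets the same edit. Below is a minimal probe of the remaining dtypes through the public API, assuming paddle.nn.functional.cross_entropy dispatches to these phi kernels on GPU; it is an illustration, not code from this commit.

# Hypothetical probe, not part of this commit: the dtypes still listed in the
# registration (float32, float64, float16) keep working through the public API.
import paddle
import paddle.nn.functional as F

paddle.set_device("gpu")
logits = paddle.rand([4, 10])
labels = paddle.randint(0, 10, [4], dtype="int64")

for dtype in ["float32", "float16"]:
    loss = F.cross_entropy(paddle.cast(logits, dtype), labels)
    print(dtype, float(loss))

# After this change, a bfloat16 call such as
#   F.cross_entropy(paddle.cast(logits, "bfloat16"), labels)
# is expected to fail on GPU because no bf16 kernel is registered (an
# assumption based on the registration removal above, not an error reproduced here).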
3 changes: 1 addition & 2 deletions paddle/phi/kernels/gpu/cross_entropy_kernel.cu
@@ -1479,8 +1479,7 @@ PD_REGISTER_KERNEL(cross_entropy_with_softmax,
                    phi::CrossEntropyWithSoftmaxKernel,
                    float,
                    double,
-                   phi::dtype::float16,
-                   phi::dtype::bfloat16) {}
+                   phi::dtype::float16) {}
 #else
 PD_REGISTER_KERNEL(cross_entropy_with_softmax,
                    GPU,
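If bf16 activations still need to flow through this loss after the change, a user-side cast is one option. The sketch below is hypothetical and not part of this commit; it assumes paddle.cast supports bfloat16 on the target device and that F.cross_entropy routes to the cross_entropy_with_softmax kernel shown above.

# Hypothetical workaround, not part of this commit: cast bf16 logits up to
# float32 before the loss, then cast the result back down if bf16 is required.
import paddle
import paddle.nn.functional as F

paddle.set_device("gpu")
logits_bf16 = paddle.cast(paddle.rand([8, 10]), "bfloat16")
labels = paddle.randint(0, 10, [8], dtype="int64")

loss = F.cross_entropy(paddle.cast(logits_bf16, "float32"), labels)
loss_bf16 = paddle.cast(loss, "bfloat16")  # optional: keep the rest of the graph in bf16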
58 changes: 1 addition & 57 deletions
@@ -15,7 +15,7 @@
 import unittest
 
 import numpy as np
-from eager_op_test import OpTest, convert_float_to_uint16, paddle_static_guard
+from eager_op_test import OpTest, paddle_static_guard
 from test_softmax_op import stable_softmax
 
 import paddle
@@ -918,62 +918,6 @@ def initParams(self):
         self.use_softmax = True
 
 
-@unittest.skipIf(
-    not core.is_compiled_with_cuda()
-    or not core.is_bfloat16_supported(core.CUDAPlace(0)),
-    "core is not compiled with CUDA and not support the bfloat16",
-)
-class TestSoftmaxWithCrossEntropyOpBF16(TestSoftmaxWithCrossEntropyOp):
-    def setUp(self):
-        self.initParams()
-        self.op_type = "softmax_with_cross_entropy"
-        self.dtype = np.uint16
-
-        # NOTE: numpy bf16 have very low accuracy, use float32 for numpy check.
-        date_type = np.float32
-        logits = getattr(
-            self,
-            "logits",
-            np.random.uniform(0.1, 1.0, self.shape).astype(date_type),
-        )
-        softmax = np.apply_along_axis(stable_softmax, self.axis, logits)
-
-        axis_dim = self.shape[self.axis]
-        self.shape[self.axis] = 1
-        labels = np.random.randint(0, axis_dim, self.shape, dtype="int64")
-
-        loss = cross_entropy(softmax, labels, self.soft_label, self.axis)
-
-        self.inputs = {
-            "Logits": convert_float_to_uint16(logits),
-            "Label": labels,
-        }
-        self.outputs = {
-            "Softmax": convert_float_to_uint16(softmax),
-            "Loss": convert_float_to_uint16(loss),
-        }
-        self.attrs = {
-            "numeric_stable_mode": self.numeric_stable_mode,
-            "soft_label": self.soft_label,
-        }
-        if self.axis != -1:
-            self.attrs['axis'] = self.axis
-
-    def test_check_output(self):
-        place = core.CUDAPlace(0)
-        if self.python_api is not None:
-            self.check_output_with_place(place)
-        self.check_output_with_place(place, atol=1e-2)
-
-    def test_check_grad(self):
-        place = core.CUDAPlace(0)
-        if self.python_api is not None:
-            self.check_grad_with_place(place, ["Logits"], "Loss")
-        self.check_grad_with_place(
-            place, ["Logits"], "Loss", max_relative_error=0.1
-        )
-
-
 class TestSoftmaxWithCrossEntropyOpError(unittest.TestCase):
     def test_errors(self):
         with program_guard(Program(), Program()):
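The deleted test built its expected outputs in float32 (stable softmax followed by a hard-label cross entropy reference) before converting them to uint16/bf16. The snippet below is a self-contained sketch of that reference computation; the helper names, shapes, and the last-axis assumption are stand-ins for the repository's stable_softmax and cross_entropy utilities, not the actual test code.

# Self-contained sketch of the float32 reference path the removed BF16 test
# relied on: numerically stable softmax, then hard-label cross entropy.
import numpy as np

def stable_softmax_ref(x):
    # Subtract the row max before exponentiating to avoid overflow.
    shifted = x - x.max(axis=-1, keepdims=True)
    e = np.exp(shifted)
    return e / e.sum(axis=-1, keepdims=True)

def cross_entropy_ref(softmax, labels):
    # labels holds class indices with a trailing axis of size 1.
    picked = np.take_along_axis(softmax, labels, axis=-1)
    return -np.log(picked)

logits = np.random.uniform(0.1, 1.0, [8, 10]).astype(np.float32)
labels = np.random.randint(0, 10, [8, 1], dtype="int64")

softmax = stable_softmax_ref(logits)
loss = cross_entropy_ref(softmax, labels)
print(softmax.shape, loss.shape)  # (8, 10) (8, 1)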

0 comments on commit 69d3f4e
