diff --git a/python/paddle/nn/functional/pooling.py b/python/paddle/nn/functional/pooling.py index 72df74b50e07e..8b90eafc90536 100755 --- a/python/paddle/nn/functional/pooling.py +++ b/python/paddle/nn/functional/pooling.py @@ -136,8 +136,8 @@ def _update_padding_nd(padding, num_dims, channel_last=False, ceil_mode=False): if padding == "VALID": if ceil_mode is not False: raise ValueError( - "When Attr(padding) is \"VALID\", Attr(ceil_mode) must be False. " - "Received ceil_mode: True." + 'When Attr(padding) is "VALID", Attr(ceil_mode) must be False. ' + 'Received ceil_mode: True.' ) padding_algorithm = "VALID" diff --git a/python/paddle/optimizer/optimizer.py b/python/paddle/optimizer/optimizer.py index 156e733ed172b..a626a66b0a172 100644 --- a/python/paddle/optimizer/optimizer.py +++ b/python/paddle/optimizer/optimizer.py @@ -801,7 +801,7 @@ def _global_learning_rate(self, program=None): def _append_optimize_op(self, block, param_and_grad): """append optimize operator to block and return all the added optimize_op""" raise NotImplementedError( - "Class \"Optimizer\" connot be used directly as an optimizer, please use its subclasses such as \"Adam\"" + 'Class "Optimizer" cannot be used directly as an optimizer, please use its subclasses such as "Adam"' ) def _create_param_lr(self, param_and_grad): diff --git a/test/cinn/test_paddle_model_convertor.py b/test/cinn/test_paddle_model_convertor.py index bd2fb86de4130..ebb1d1e4e63a4 100644 --- a/test/cinn/test_paddle_model_convertor.py +++ b/test/cinn/test_paddle_model_convertor.py @@ -40,7 +40,7 @@ parser.add_argument( "-m", "--model_filename", - help="The filename of model file, default \"__model__\"", + help='The filename of model file, default "__model__"', type=str, default="__model__", ) @@ -103,7 +103,7 @@ def setUp(self): self.params_filename = args.params_filename logger.info( - f"Run Model From \"{self.model_dir}\", which model filename is \"{self.model_filename}\", and parameter filename is \"{self.params_filename}\"" + f'Run Model From "{self.model_dir}", which model filename is "{self.model_filename}", and parameter filename is "{self.params_filename}"' ) self.load_paddle_program() diff --git a/test/legacy_test/op.py b/test/legacy_test/op.py index 1087998e8d6aa..1d29b294d40ff 100644 --- a/test/legacy_test/op.py +++ b/test/legacy_test/op.py @@ -273,15 +273,15 @@ def __call__(self, *args, **kwargs): if "type" in kwargs: if len(args) != 0: raise ValueError( - "Except the argument \"type\"," - "all of the other arguments should be keyword arguments." + 'Except the argument "type",' + 'all of the other arguments should be keyword arguments.' ) t = kwargs.pop("type") else: if len(args) != 1: raise ValueError( - "Except the argument \"type\"," - "all of the other arguments should be keyword arguments." + 'Except the argument "type",' + 'all of the other arguments should be keyword arguments.' ) t = args[0] diff --git a/test/legacy_test/test_operator_desc.py b/test/legacy_test/test_operator_desc.py index d0ee95854cd94..c4fc5e1ad24cb 100644 --- a/test/legacy_test/test_operator_desc.py +++ b/test/legacy_test/test_operator_desc.py @@ -35,7 +35,7 @@ def test_error_type(self): self.assertFail() except ValueError as a_err: self.assertEqual( - str(a_err), "Operator \"no_such_op\" has not been registered." + str(a_err), 'Operator "no_such_op" has not been registered.' ) def test_op_desc_creation(self):