
Commit 16067f1

[CodeStyle] Clean trailing whitespace (part1) (#64828)

1 parent 17a78f6 commit 16067f1

File tree

17 files changed: +152 -152 lines changed
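
Every hunk below removes only trailing whitespace, so each `-` line and the `+` line that replaces it look identical here; the difference is invisible spaces or tabs at the end of the removed line. A minimal sketch of this kind of cleanup (the traversal and suffix list are illustrative assumptions, not the actual tooling behind #64828):

```python
# Sketch: strip trailing whitespace from matching files under the repo root.
# The suffix set is an assumption for illustration, not the commit's file list.
import pathlib

SUFFIXES = {".proto", ".in", ".bat", ".j2", ".map"}

for path in pathlib.Path(".").rglob("*"):
    if path.is_file() and path.suffix in SUFFIXES:
        original = path.read_text(encoding="utf-8")
        # Strip trailing spaces/tabs per line; keep one newline per line.
        cleaned = "".join(line.rstrip() + "\n" for line in original.splitlines())
        if cleaned != original:
            path.write_text(cleaned, encoding="utf-8")
```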

.clang-format

+4-4
@@ -6,11 +6,11 @@
 # The basic usage is,
 # clang-format -i -style=file PATH/TO/SOURCE/CODE
 #
-# The -style=file implicit use ".clang-format" file located in one of
-# parent directory.
+# The -style=file implicit use ".clang-format" file located in one of
+# parent directory.
 # The -i means inplace change.
 #
-# The document of clang-format is
+# The document of clang-format is
 # http://clang.llvm.org/docs/ClangFormat.html
 # http://clang.llvm.org/docs/ClangFormatStyleOptions.html
 ---
@@ -20,7 +20,7 @@ IndentWidth: 2
 TabWidth: 2
 ContinuationIndentWidth: 4
 AccessModifierOffset: -1 # The private/protected/public has no indent in class
-Standard: Cpp11
+Standard: Cpp11
 AllowAllParametersOfDeclarationOnNextLine: true
 BinPackParameters: false
 BinPackArguments: false

cmake/PaddleConfig.cmake.in

+1-1
@@ -12,7 +12,7 @@
 get_filename_component(PADDLE_INSTALL_PREFIX "${CMAKE_CURRENT_LIST_FILE}/../.." ABSOLUTE)

 # include directories
-set(PADDLE_INCLUDE_DIRS
+set(PADDLE_INCLUDE_DIRS
 ${PADDLE_INSTALL_PREFIX}/include
 ${PADDLE_INSTALL_PREFIX}/include/third_party
 )

paddle/cinn/hlir/pe/schedule_param.proto

+3-3
@@ -1,11 +1,11 @@
 // Copyright (c) 2021 CINN Authors. All Rights Reserved.
-//
+//
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
 // You may obtain a copy of the License at
-//
+//
 // http://www.apache.org/licenses/LICENSE-2.0
-//
+//
 // Unless required by applicable law or agreed to in writing, software
 // distributed under the License is distributed on an "AS IS" BASIS,
 // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

paddle/cinn/ir/group_schedule/config/tileconfig_desc.proto

+4-4
@@ -1,11 +1,11 @@
 // Copyright (c) 2022 CINN Authors. All Rights Reserved.
-//
+//
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
 // You may obtain a copy of the License at
-//
+//
 // http://www.apache.org/licenses/LICENSE-2.0
-//
+//
 // Unless required by applicable law or agreed to in writing, software
 // distributed under the License is distributed on an "AS IS" BASIS,
 // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -36,7 +36,7 @@ message TileConfig{
 message TileData{
 int32 priority=1;
 BucketInfo bucket_info =2;
-TileConfig tile_config =3;
+TileConfig tile_config =3;
 }

 message TileDatabase{

paddle/cinn/ir/schedule/schedule_desc.proto

+3-3
@@ -1,11 +1,11 @@
 // Copyright (c) 2022 CINN Authors. All Rights Reserved.
-//
+//
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
 // You may obtain a copy of the License at
-//
+//
 // http://www.apache.org/licenses/LICENSE-2.0
-//
+//
 // Unless required by applicable law or agreed to in writing, software
 // distributed under the License is distributed on an "AS IS" BASIS,
 // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

paddle/fluid/distributed/ps.proto

+3-3
@@ -1,11 +1,11 @@
 // Copyright (c) 2023 PaddlePaddle Authors. All Rights Reserved.
-//
+//
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
 // You may obtain a copy of the License at
-//
+//
 // http://www.apache.org/licenses/LICENSE-2.0
-//
+//
 // Unless required by applicable law or agreed to in writing, software
 // distributed under the License is distributed on an "AS IS" BASIS,
 // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

paddle/fluid/distributed/rpc/rpc.proto

+3-3
@@ -1,11 +1,11 @@
 // Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
-//
+//
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
 // You may obtain a copy of the License at
-//
+//
 // http://www.apache.org/licenses/LICENSE-2.0
-//
+//
 // Unless required by applicable law or agreed to in writing, software
 // distributed under the License is distributed on an "AS IS" BASIS,
 // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

paddle/fluid/inference/api/demo_ci/run_windows_demo.bat

+8-8
@@ -65,12 +65,12 @@ if /i "%use_gpu%"=="Y" (
 set use_gpu=N
 )

-rem set_path_vs_command_prompt
+rem set_path_vs_command_prompt
 :set_vcvarsall_dir
 SET /P vcvarsall_dir="Please input the path of visual studio command Prompt, such as C:\Program Files (x86)\Microsoft Visual Studio\2017\Community\VC\Auxiliary\Build\vcvarsall.bat =======>"
 set tmp_var=!vcvarsall_dir!
 call:remove_space
-set vcvarsall_dir=!tmp_var!
+set vcvarsall_dir=!tmp_var!
 IF NOT EXIST "%vcvarsall_dir%" (
 echo "------------%vcvarsall_dir% not exist------------"
 goto set_vcvarsall_dir
@@ -104,18 +104,18 @@ if EXIST "%source_path%\%model_name%.tar.gz" (
 SET /P python_path="Please input the path of python.exe, such as C:\Python37\python.exe =======>"
 set tmp_var=!python_path!
 call:remove_space
-set python_path=!tmp_var!
+set python_path=!tmp_var!
 if "!python_path!"=="" (
 set python_path=python.exe
 ) else (
 if NOT exist "!python_path!" (
-echo "------------!python_path! not exist------------"
+echo "------------!python_path! not exist------------"
 goto:eof
-)
+)
 )
 md %source_path%\%model_name%
 !python_path! %source_path%\untar_model.py %source_path%\%model_name%.tar.gz %source_path%\%model_name%
-
+
 SET error_code=N
 if "%model_name%"=="mobilenet" (
 if NOT EXIST "%source_path%\%model_name%\model" set error_code=Y
@@ -127,7 +127,7 @@ if EXIST "%source_path%\%model_name%.tar.gz" (
 del /f /s /q "%source_path%\%model_name%\*.*" >nul 2>&1
 rd /s /q "%source_path%\%model_name%" >nul 2>&1
 goto:eof
-)
+)
 )
 )

@@ -201,7 +201,7 @@ if /i "%use_gpu%"=="Y" (
 )

 if exist "%build_path%\Release\%demo_name%.exe" (
-cd %build_path%\Release
+cd %build_path%\Release
 set GLOG_v=4
 if "%demo_name%"=="simple_on_word2vec" (
 %demo_name%.exe --dirname="%source_path%\%model_name%\%model_name%" --use_gpu="%use_gpu%"

paddle/fluid/inference/paddle_inference.map

+1-1
@@ -71,7 +71,7 @@
 /* *paddle::framework*; */
 *paddle::framework::InitDevices*;
 *paddle::framework::InitMemoryMethod*;
-
+
 *paddle::framework::InterpreterCore*;
 *paddle::framework::Executor*;
 *paddle::framework::proto*;

paddle/fluid/ir_adaptor/translator/op_compat_info.cc.j2

+11-11
@@ -2,7 +2,7 @@

 namespace paddle {
 namespace translator {
-
+
 OpNameNormalizer::OpNameNormalizer() {
 op_name_mappings = {
 {% for legacy_name, normalized_name in op_name_pairs.items() %}
@@ -11,35 +11,35 @@ OpNameNormalizer::OpNameNormalizer() {
 };
 op_arg_name_mappings = {
 {% for op_name, arg_name_mappings in op_arg_name_pairs.items() %}
-{
-"{{op_name}}",
+{
+"{{op_name}}",
 {
 {% for normalized_name, legacy_name in arg_name_mappings.items() %}
 { "{{normalized_name}}", "{{legacy_name}}" },
 {% endfor %}
-},
+},
 },
 {% endfor %}
 };
 op_mutable_attributes = {
 {% for op_name, mutable_attributes in op_mutable_attributes.items() %}
-{
-"{{op_name}}",
+{
+"{{op_name}}",
 {
 {% for attribute_name in mutable_attributes %}
 "{{attribute_name}}",
 {% endfor %}
-},
+},
 },
 {% endfor %}
 };
 op_mutable_attribute_infos = {
 {% for op_name, mutable_attribute_infos in op_mutable_attribute_infos.items() %}
-{
-"{{op_name}}",
+{
+"{{op_name}}",
 {
 {% for attribute_name, attribute_info in mutable_attribute_infos.items() %}
-{
+{
 "{{attribute_name}}",
 {
 {% for candidate_var_name in attribute_info %}
@@ -48,7 +48,7 @@ OpNameNormalizer::OpNameNormalizer() {
 },
 },
 {% endfor %}
-},
+},
 },
 {% endfor %}
 };
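
The template above emits nested C++ brace-initializer lists from Python mappings. A hedged sketch of how one fragment renders, using the `jinja2` package; the `scale` op and its `x`/`X`, `out`/`Out` argument pairs are a made-up illustration, not data from this commit:

```python
# Render a simplified version of the op_arg_name_mappings fragment.
import jinja2

fragment = """op_arg_name_mappings = {
{%- for op_name, arg_name_mappings in op_arg_name_pairs.items() %}
  { "{{op_name}}",
    {
    {%- for normalized_name, legacy_name in arg_name_mappings.items() %}
      { "{{normalized_name}}", "{{legacy_name}}" },
    {%- endfor %}
    },
  },
{%- endfor %}
};"""

print(jinja2.Template(fragment).render(
    op_arg_name_pairs={"scale": {"x": "X", "out": "Out"}}))
# Prints:
# op_arg_name_mappings = {
#   { "scale",
#     {
#       { "x", "X" },
#       { "out", "Out" },
#     },
#   },
# };
```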

paddle/fluid/jit/property.proto

+6-6
@@ -84,7 +84,7 @@ message TensorProto {
 // For int64.
 // When this field is present, the data_type field MUST be INT64
 repeated int64 int64_data = 7 [packed = true];
-
+
 // For double
 // Complex128 tensors are encoded as a single array of doubles,
 // with the real components appearing in odd numbered positions,
@@ -130,22 +130,22 @@ message ValueProto {
 STRINGS = 8;
 TENSORS = 9;
 }
-optional string name = 1;
-
+optional string name = 1;
+
 optional AttributeType type = 2; // discriminator that indicates which field below is in use
-
+
 // Exactly ONE of the following fields must be present
 optional float f = 3; // float
 optional int64 i = 4; // int
 optional bytes s = 5; // UTF-8 string
 optional TensorProto t = 6; // tensor value
-
+
 repeated float floats = 7; // list of floats
 repeated int64 ints = 8; // list of ints
 repeated bytes strings = 9; // list of UTF-8 strings
 repeated TensorProto tensors = 10; // list of tensors
 }

 message PropertyVals {
-repeated ValueProto entrys=1;
+repeated ValueProto entrys=1;
 }
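
ValueProto is a tagged union: the `type` field is the discriminator and exactly one payload field is set per message. A sketch of how a reader might dispatch on whichever field is present; `value` stands for a parsed ValueProto message, and the surrounding module/class names are assumptions, not Paddle's actual API:

```python
# Probe the singular fields first ("exactly ONE ... must be present"),
# then fall back to the repeated list fields.
SINGULAR = ("f", "i", "s", "t")
REPEATED = ("floats", "ints", "strings", "tensors")

def read_value(value):
    """Return whichever payload this ValueProto-style tagged union carries."""
    for name in SINGULAR:
        if value.HasField(name):        # proto2 optional fields support HasField
            return getattr(value, name)
    for name in REPEATED:
        payload = getattr(value, name)  # repeated fields have no HasField;
        if payload:                     # a non-empty list is the payload
            return list(payload)
    return None
```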

paddle/phi/core/distributed/auto_parallel/auto_parallel.proto

+21-21
@@ -25,7 +25,7 @@ message ProcessMeshProto {
 // There are no duplicate process ids within one process mesh.
 repeated int64 process_ids = 2;

-// The name of each dimension.
+// The name of each dimension.
 repeated string dim_names = 3;

 }
@@ -37,17 +37,17 @@ message TensorDistAttrProto {
 optional ProcessMeshProto process_mesh = 1;

 // The length of dims_mapping is same as the length of the tensor shape.
-// The i-th dimension of the tensor will be sharded by the dims_mapping[i]-th dimension
+// The i-th dimension of the tensor will be sharded by the dims_mapping[i]-th dimension
 // of the above process mesh. If dims_mapping[i] is -1, the i-th dimension of the tensor
 // will not be sharded. For example, given a tensor shape [2, 6, 12], a process mesh
 // shape [2, 3] and a dims_mapping [-1, 1, 0], each sharded tensor will have a shape [2, 2, 6].
 repeated int64 dims_mapping = 2;

-// The batch dimension of the corresponding tensor.
+// The batch dimension of the corresponding tensor.
 optional int64 batch_dim = 3;

-// If the dynamic_dims[i] is True, the i-th dimension of the corresponding tensor
-// is dynamic changed. Otherwise, the i-th dimension of the tensor is static determined.
+// If the dynamic_dims[i] is True, the i-th dimension of the corresponding tensor
+// is dynamic changed. Otherwise, the i-th dimension of the tensor is static determined.
 repeated bool dynamic_dims = 4;

 // This field is used to distinguish vars which are in same process_mesh and in different vpp chunk
@@ -60,16 +60,16 @@ message OperatorDistAttrProto {
 message TensorDistAttrMappingEntryProto {
 optional string name = 1;
 optional TensorDistAttrProto tensor_dist_attr = 2;
-}
+}
 // The key of this map is the input tensor name and the value is the distributed attribute
-// of the input tensor required by this corresponding operator.
-// The distributed attribute of the actual tensor may be not the same as that within
+// of the input tensor required by this corresponding operator.
+// The distributed attribute of the actual tensor may be not the same as that within
 // the distributed attribute of the operator.
 repeated TensorDistAttrMappingEntryProto input_dist_attrs = 1;

 // The key of this map is the output tensor name and the value is the distributed attribute
-// of the output tensor required by this corresponding operator.
-// The distributed attribute of the actual tensor may be not the same as that within
+// of the output tensor required by this corresponding operator.
+// The distributed attribute of the actual tensor may be not the same as that within
 // the distributed attribute of the operator.
 repeated TensorDistAttrMappingEntryProto output_dist_attrs = 2;

@@ -81,7 +81,7 @@ message OperatorDistAttrProto {
 // may shared the same distributed operator, the field is use for this scenario.
 optional string impl_type = 4;

-// This field tells which distributed implementations of this corresponding operator
+// This field tells which distributed implementations of this corresponding operator
 // will be selected for the actual computation.
 optional int64 impl_idx = 5;

@@ -115,13 +115,13 @@ message DeviceProto {
 optional string type = 4;

 // The capability of this device.
-optional DeviceCapabilityProto capability = 5;
+optional DeviceCapabilityProto capability = 5;
 }

-// This proto describes the capability of the link between two devices.
-message LinkCapabilityProto {
-optional int64 bandwidth = 1; // Bytes/s
-optional int64 latency = 2;
+// This proto describes the capability of the link between two devices.
+message LinkCapabilityProto {
+optional int64 bandwidth = 1; // Bytes/s
+optional int64 latency = 2;
 }

 message LinkProto {
@@ -133,14 +133,14 @@ message LinkProto {

 // Represent the link type.
 optional string type = 3;
-
+
 // The capability of this link.
-optional LinkCapabilityProto capability = 4;
+optional LinkCapabilityProto capability = 4;
 }

 // DeviceMesh is used to organize devices and like n-dimension array.
 message DeviceMeshProto {
-// The global id of this mesh.
+// The global id of this mesh.
 optional string name = 1;

 // The size of each dimension.
@@ -150,13 +150,13 @@ message DeviceMeshProto {
 // There are no duplicate device ids within one device mesh.
 repeated int64 device_ids = 3;

-// The name of each dimension.
+// The name of each dimension.
 repeated string dim_names = 4;

 // The devices of this mesh.
 repeated DeviceProto devices = 5;

-// The links are between devices.
+// The links are between devices.
 repeated LinkProto links = 6;
 }

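
The dims_mapping comment in TensorDistAttrProto above carries a small calculation worth spelling out. A worked sketch reproducing its `[2, 6, 12]` example; the helper name is hypothetical, not Paddle's API:

```python
# Hypothetical helper: derive the per-process (sharded) shape implied by a
# dims_mapping, per the comment in TensorDistAttrProto.
def local_shape(tensor_shape, mesh_shape, dims_mapping):
    out = []
    for size, mesh_axis in zip(tensor_shape, dims_mapping):
        if mesh_axis == -1:
            out.append(size)                  # -1: this dimension is not sharded
        else:
            out.append(size // mesh_shape[mesh_axis])  # split across that mesh axis
    return out

# The example from the proto comment: tensor shape [2, 6, 12], process mesh
# shape [2, 3], dims_mapping [-1, 1, 0] -> each shard has shape [2, 2, 6].
assert local_shape([2, 6, 12], [2, 3], [-1, 1, 0]) == [2, 2, 6]
```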
