[CodeStyle] Clean trailing whitespace (part1) (PaddlePaddle#64828)
SigureMo authored Jun 3, 2024
1 parent 17a78f6 commit 16067f1
Showing 17 changed files with 152 additions and 152 deletions.
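
Every hunk below makes the same mechanical change: a line that ended in stray spaces or tabs is removed and added back without them. Removed lines are prefixed with "-" and their cleaned replacements with "+"; because the difference is trailing whitespace, the two lines of each pair look identical here. The commit does not record which tool performed the cleanup; purely as an illustration, the same transformation can be scripted in a few lines of Python (a hypothetical helper, not part of this PR):

    import pathlib
    import re
    import sys

    # Matches spaces/tabs at the end of any line.
    TRAILING_WS = re.compile(r"[ \t]+$", re.MULTILINE)

    def clean(path: pathlib.Path) -> bool:
        """Rewrite the file in place without trailing whitespace; report if it changed."""
        text = path.read_text(encoding="utf-8")
        fixed = TRAILING_WS.sub("", text)
        if fixed != text:
            path.write_text(fixed, encoding="utf-8")
            return True
        return False

    if __name__ == "__main__":
        changed = sum(clean(pathlib.Path(name)) for name in sys.argv[1:])
        print(f"cleaned {changed} file(s)")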
8 changes: 4 additions & 4 deletions .clang-format
@@ -6,11 +6,11 @@
# The basic usage is,
# clang-format -i -style=file PATH/TO/SOURCE/CODE
#
-# The -style=file implicit use ".clang-format" file located in one of
-# parent directory.
+# The -style=file implicit use ".clang-format" file located in one of
+# parent directory.
# The -i means inplace change.
#
-# The document of clang-format is
+# The document of clang-format is
# http://clang.llvm.org/docs/ClangFormat.html
# http://clang.llvm.org/docs/ClangFormatStyleOptions.html
---
@@ -20,7 +20,7 @@ IndentWidth: 2
TabWidth: 2
ContinuationIndentWidth: 4
AccessModifierOffset: -1 # The private/protected/public has no indent in class
-Standard: Cpp11
+Standard: Cpp11
AllowAllParametersOfDeclarationOnNextLine: true
BinPackParameters: false
BinPackArguments: false
2 changes: 1 addition & 1 deletion cmake/PaddleConfig.cmake.in
@@ -12,7 +12,7 @@
get_filename_component(PADDLE_INSTALL_PREFIX "${CMAKE_CURRENT_LIST_FILE}/../.." ABSOLUTE)

# include directories
-set(PADDLE_INCLUDE_DIRS
+set(PADDLE_INCLUDE_DIRS
${PADDLE_INSTALL_PREFIX}/include
${PADDLE_INSTALL_PREFIX}/include/third_party
)
6 changes: 3 additions & 3 deletions paddle/cinn/hlir/pe/schedule_param.proto
@@ -1,11 +1,11 @@
// Copyright (c) 2021 CINN Authors. All Rights Reserved.
-//
+//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
-//
+//
// http://www.apache.org/licenses/LICENSE-2.0
-//
+//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
8 changes: 4 additions & 4 deletions paddle/cinn/ir/group_schedule/config/tileconfig_desc.proto
@@ -1,11 +1,11 @@
// Copyright (c) 2022 CINN Authors. All Rights Reserved.
-//
+//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
-//
+//
// http://www.apache.org/licenses/LICENSE-2.0
-//
+//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -36,7 +36,7 @@ message TileConfig{
message TileData{
int32 priority=1;
BucketInfo bucket_info =2;
-TileConfig tile_config =3;
+TileConfig tile_config =3;
}

message TileDatabase{
6 changes: 3 additions & 3 deletions paddle/cinn/ir/schedule/schedule_desc.proto
@@ -1,11 +1,11 @@
// Copyright (c) 2022 CINN Authors. All Rights Reserved.
-//
+//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
-//
+//
// http://www.apache.org/licenses/LICENSE-2.0
-//
+//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
6 changes: 3 additions & 3 deletions paddle/fluid/distributed/ps.proto
@@ -1,11 +1,11 @@
// Copyright (c) 2023 PaddlePaddle Authors. All Rights Reserved.
-//
+//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
-//
+//
// http://www.apache.org/licenses/LICENSE-2.0
-//
+//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
6 changes: 3 additions & 3 deletions paddle/fluid/distributed/rpc/rpc.proto
@@ -1,11 +1,11 @@
// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
-//
+//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
-//
+//
// http://www.apache.org/licenses/LICENSE-2.0
-//
+//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 changes: 8 additions & 8 deletions paddle/fluid/inference/api/demo_ci/run_windows_demo.bat
@@ -65,12 +65,12 @@ if /i "%use_gpu%"=="Y" (
set use_gpu=N
)

-rem set_path_vs_command_prompt
+rem set_path_vs_command_prompt
:set_vcvarsall_dir
SET /P vcvarsall_dir="Please input the path of visual studio command Prompt, such as C:\Program Files (x86)\Microsoft Visual Studio\2017\Community\VC\Auxiliary\Build\vcvarsall.bat =======>"
set tmp_var=!vcvarsall_dir!
call:remove_space
-set vcvarsall_dir=!tmp_var!
+set vcvarsall_dir=!tmp_var!
IF NOT EXIST "%vcvarsall_dir%" (
echo "------------%vcvarsall_dir% not exist------------"
goto set_vcvarsall_dir
@@ -104,18 +104,18 @@ if EXIST "%source_path%\%model_name%.tar.gz" (
SET /P python_path="Please input the path of python.exe, such as C:\Python37\python.exe =======>"
set tmp_var=!python_path!
call:remove_space
-set python_path=!tmp_var!
+set python_path=!tmp_var!
if "!python_path!"=="" (
set python_path=python.exe
) else (
if NOT exist "!python_path!" (
-echo "------------!python_path! not exist------------"
+echo "------------!python_path! not exist------------"
goto:eof
)
)
)
md %source_path%\%model_name%
!python_path! %source_path%\untar_model.py %source_path%\%model_name%.tar.gz %source_path%\%model_name%

SET error_code=N
if "%model_name%"=="mobilenet" (
if NOT EXIST "%source_path%\%model_name%\model" set error_code=Y
@@ -127,7 +127,7 @@ if EXIST "%source_path%\%model_name%.tar.gz" (
del /f /s /q "%source_path%\%model_name%\*.*" >nul 2>&1
rd /s /q "%source_path%\%model_name%" >nul 2>&1
goto:eof
-)
+)
)
)

@@ -201,7 +201,7 @@ if /i "%use_gpu%"=="Y" (
)

if exist "%build_path%\Release\%demo_name%.exe" (
-cd %build_path%\Release
+cd %build_path%\Release
set GLOG_v=4
if "%demo_name%"=="simple_on_word2vec" (
%demo_name%.exe --dirname="%source_path%\%model_name%\%model_name%" --use_gpu="%use_gpu%"
2 changes: 1 addition & 1 deletion paddle/fluid/inference/paddle_inference.map
@@ -71,7 +71,7 @@
/* *paddle::framework*; */
*paddle::framework::InitDevices*;
*paddle::framework::InitMemoryMethod*;
-
+
*paddle::framework::InterpreterCore*;
*paddle::framework::Executor*;
*paddle::framework::proto*;
22 changes: 11 additions & 11 deletions paddle/fluid/ir_adaptor/translator/op_compat_info.cc.j2
@@ -2,7 +2,7 @@

namespace paddle {
namespace translator {
-
+
OpNameNormalizer::OpNameNormalizer() {
op_name_mappings = {
{% for legacy_name, normalized_name in op_name_pairs.items() %}
@@ -11,35 +11,35 @@ OpNameNormalizer::OpNameNormalizer() {
};
op_arg_name_mappings = {
{% for op_name, arg_name_mappings in op_arg_name_pairs.items() %}
{
-"{{op_name}}",
-{
+"{{op_name}}",
+{
{% for normalized_name, legacy_name in arg_name_mappings.items() %}
{ "{{normalized_name}}", "{{legacy_name}}" },
{% endfor %}
},
-},
+},
{% endfor %}
};
op_mutable_attributes = {
{% for op_name, mutable_attributes in op_mutable_attributes.items() %}
{
-"{{op_name}}",
-{
+"{{op_name}}",
+{
{% for attribute_name in mutable_attributes %}
"{{attribute_name}}",
{% endfor %}
},
-},
+},
{% endfor %}
};
op_mutable_attribute_infos = {
{% for op_name, mutable_attribute_infos in op_mutable_attribute_infos.items() %}
{
-"{{op_name}}",
-{
+"{{op_name}}",
+{
{% for attribute_name, attribute_info in mutable_attribute_infos.items() %}
-{
+{
"{{attribute_name}}",
{
{% for candidate_var_name in attribute_info %}
@@ -48,7 +48,7 @@ OpNameNormalizer::OpNameNormalizer() {
},
},
{% endfor %}
-},
+},
},
{% endfor %}
};
12 changes: 6 additions & 6 deletions paddle/fluid/jit/property.proto
@@ -84,7 +84,7 @@ message TensorProto {
// For int64.
// When this field is present, the data_type field MUST be INT64
repeated int64 int64_data = 7 [packed = true];
-
+
// For double
// Complex128 tensors are encoded as a single array of doubles,
// with the real components appearing in odd numbered positions,
@@ -130,22 +130,22 @@ message ValueProto {
STRINGS = 8;
TENSORS = 9;
}
-optional string name = 1;
+optional string name = 1;

optional AttributeType type = 2; // discriminator that indicates which field below is in use

// Exactly ONE of the following fields must be present
optional float f = 3; // float
optional int64 i = 4; // int
optional bytes s = 5; // UTF-8 string
optional TensorProto t = 6; // tensor value

repeated float floats = 7; // list of floats
repeated int64 ints = 8; // list of ints
repeated bytes strings = 9; // list of UTF-8 strings
repeated TensorProto tensors = 10; // list of tensors
}

message PropertyVals {
-repeated ValueProto entrys=1;
+repeated ValueProto entrys=1;
}
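
A side note on the ValueProto layout above: "type" is a discriminator and exactly one payload field (f, i, s, t, or one of the repeated lists) is meant to be set. A minimal sketch of that rule in Python; the helper and its dict encoding are illustrative assumptions, not Paddle API:

    def make_value(name, **payload):
        # Enforce ValueProto's "exactly ONE of the following fields" rule.
        allowed = {"f", "i", "s", "t", "floats", "ints", "strings", "tensors"}
        if len(payload) != 1 or not set(payload) <= allowed:
            raise ValueError(f"set exactly one of {sorted(allowed)}")
        ((field, value),) = payload.items()
        return {"name": name, "type": field, field: value}

    print(make_value("dropout_rate", f=0.5))  # {'name': 'dropout_rate', 'type': 'f', 'f': 0.5}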
42 changes: 21 additions & 21 deletions paddle/phi/core/distributed/auto_parallel/auto_parallel.proto
@@ -25,7 +25,7 @@ message ProcessMeshProto {
// There are no duplicate process ids within one process mesh.
repeated int64 process_ids = 2;

-// The name of each dimension.
+// The name of each dimension.
repeated string dim_names = 3;

}
@@ -37,17 +37,17 @@ message TensorDistAttrProto {
optional ProcessMeshProto process_mesh = 1;

// The length of dims_mapping is same as the length of the tensor shape.
-// The i-th dimension of the tensor will be sharded by the dims_mapping[i]-th dimension
+// The i-th dimension of the tensor will be sharded by the dims_mapping[i]-th dimension
// of the above process mesh. If dims_mapping[i] is -1, the i-th dimension of the tensor
// will not be sharded. For example, given a tensor shape [2, 6, 12], a process mesh
// shape [2, 3] and a dims_mapping [-1, 1, 0], each sharded tensor will have a shape [2, 2, 6].
repeated int64 dims_mapping = 2;

-// The batch dimension of the corresponding tensor.
+// The batch dimension of the corresponding tensor.
optional int64 batch_dim = 3;

-// If the dynamic_dims[i] is True, the i-th dimension of the corresponding tensor
-// is dynamic changed. Otherwise, the i-th dimension of the tensor is static determined.
+// If the dynamic_dims[i] is True, the i-th dimension of the corresponding tensor
+// is dynamic changed. Otherwise, the i-th dimension of the tensor is static determined.
repeated bool dynamic_dims = 4;

// This field is used to distinguish vars which are in same process_mesh and in different vpp chunk
@@ -60,16 +60,16 @@ message OperatorDistAttrProto {
message TensorDistAttrMappingEntryProto {
optional string name = 1;
optional TensorDistAttrProto tensor_dist_attr = 2;
-}
+}
// The key of this map is the input tensor name and the value is the distributed attribute
-// of the input tensor required by this corresponding operator.
-// The distributed attribute of the actual tensor may be not the same as that within
+// of the input tensor required by this corresponding operator.
+// The distributed attribute of the actual tensor may be not the same as that within
// the distributed attribute of the operator.
repeated TensorDistAttrMappingEntryProto input_dist_attrs = 1;

// The key of this map is the output tensor name and the value is the distributed attribute
-// of the output tensor required by this corresponding operator.
-// The distributed attribute of the actual tensor may be not the same as that within
+// of the output tensor required by this corresponding operator.
+// The distributed attribute of the actual tensor may be not the same as that within
// the distributed attribute of the operator.
repeated TensorDistAttrMappingEntryProto output_dist_attrs = 2;

@@ -81,7 +81,7 @@ message OperatorDistAttrProto {
// may shared the same distributed operator, the field is use for this scenario.
optional string impl_type = 4;

-// This field tells which distributed implementations of this corresponding operator
+// This field tells which distributed implementations of this corresponding operator
// will be selected for the actual computation.
optional int64 impl_idx = 5;

@@ -115,13 +115,13 @@ message DeviceProto {
optional string type = 4;

// The capability of this device.
-optional DeviceCapabilityProto capability = 5;
+optional DeviceCapabilityProto capability = 5;
}

-// This proto describes the capability of the link between two devices.
-message LinkCapabilityProto {
-optional int64 bandwidth = 1; // Bytes/s
-optional int64 latency = 2;
+// This proto describes the capability of the link between two devices.
+message LinkCapabilityProto {
+optional int64 bandwidth = 1; // Bytes/s
+optional int64 latency = 2;
}

message LinkProto {
@@ -133,14 +133,14 @@ message LinkProto {

// Represent the link type.
optional string type = 3;

// The capability of this link.
-optional LinkCapabilityProto capability = 4;
+optional LinkCapabilityProto capability = 4;
}

// DeviceMesh is used to organize devices and like n-dimension array.
message DeviceMeshProto {
-// The global id of this mesh.
+// The global id of this mesh.
optional string name = 1;

// The size of each dimension.
@@ -150,13 +150,13 @@ message DeviceMeshProto {
// There are no duplicate device ids within one device mesh.
repeated int64 device_ids = 3;

-// The name of each dimension.
+// The name of each dimension.
repeated string dim_names = 4;

// The devices of this mesh.
repeated DeviceProto devices = 5;

-// The links are between devices.
+// The links are between devices.
repeated LinkProto links = 6;
}

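
The dims_mapping comment in TensorDistAttrProto above fully determines each shard's shape, and its worked example can be checked mechanically. A small sketch under the stated convention (illustrative only, not Paddle's implementation):

    def local_shape(tensor_shape, mesh_shape, dims_mapping):
        # dims_mapping[i] is the process-mesh axis sharding tensor axis i; -1 means replicated.
        assert len(dims_mapping) == len(tensor_shape)
        return [
            size if axis == -1 else size // mesh_shape[axis]
            for size, axis in zip(tensor_shape, dims_mapping)
        ]

    # The example from the comment: tensor [2, 6, 12], mesh [2, 3], dims_mapping [-1, 1, 0].
    print(local_shape([2, 6, 12], [2, 3], [-1, 1, 0]))  # -> [2, 2, 6]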