[fix](tvf) fix FE cannot start when replaying alter view from tvf (apa…
BePPPower authored Sep 20, 2024
1 parent acbf596 commit a2b9aa4
Showing 6 changed files with 208 additions and 5 deletions.
9 changes: 4 additions & 5 deletions fe/fe-core/src/main/java/org/apache/doris/alter/Alter.java
@@ -729,13 +729,12 @@ public void replayModifyViewDef(AlterViewInfo alterViewInfo) throws MetaNotFound
        try {
            String viewName = view.getName();
            view.setInlineViewDefWithSqlMode(inlineViewDef, alterViewInfo.getSqlMode());
-           try {
-               view.init();
-           } catch (UserException e) {
-               throw new DdlException("failed to init view stmt, reason=" + e.getMessage());
-           }
            view.setNewFullSchema(newFullSchema);

+           // We do not need to init view here.
+           // During the `init` phase, some `Alter-View` statements will access the remote file system,
+           // but they should not access it during the metadata replay phase.
+
            db.unregisterTable(viewName);
            db.registerTable(view);

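The comment added above captures the reason for dropping `view.init()`: when a view definition references a table-valued function such as `HDFS(...)`, the `init` phase may reach out to the remote file system, and the FE must not perform remote access while replaying its edit log at startup, otherwise an unreachable path can prevent the FE from starting. The following minimal, self-contained Java sketch illustrates that separation. It is not the Doris source; `ReplaySketch`, its nested `View` class, `alterView`, and `replayAlterView` are illustrative stand-ins. The user-facing path may validate the definition against remote storage, while the replay path only updates local catalog metadata.

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

// Minimal sketch (NOT the Doris source) of the principle behind this change:
// the user-facing ALTER VIEW path may validate a TVF-backed definition against
// remote storage, but the edit-log replay path must only touch local catalog
// state, because the remote file system may be unreachable at FE startup.
public class ReplaySketch {
    static class View {
        final String name;
        String inlineViewDef;
        View(String name, String def) { this.name = name; this.inlineViewDef = def; }
        // Hypothetical stand-in for view.init(): may perform remote I/O.
        void init() { System.out.println("validating '" + inlineViewDef + "' against the remote FS"); }
    }

    private final Map<String, View> catalog = new ConcurrentHashMap<>();

    // User-facing ALTER VIEW: remote validation is acceptable here.
    void alterView(String name, String newDef) {
        View v = new View(name, newDef);
        v.init();                        // may reach the remote file system
        catalog.put(name, v);
    }

    // Edit-log replay at FE startup: metadata only, deliberately no init().
    void replayAlterView(String name, String newDef) {
        catalog.put(name, new View(name, newDef));
    }

    public static void main(String[] args) {
        ReplaySketch fe = new ReplaySketch();
        fe.alterView("test_hdfs_tvf_create_view", "select * from HDFS(...)");
        // Simulate replaying the same ALTER VIEW during startup: no remote access happens.
        fe.replayAlterView("test_hdfs_tvf_create_view", "select c1 from HDFS(...)");
        System.out.println("replayed definition: " + fe.catalog.get("test_hdfs_tvf_create_view").inlineViewDef);
    }
}

In the actual fix the replay path simply omits the `view.init()` call, as shown in the hunk above; the definition was already validated when the original ALTER VIEW statement executed and was written to the edit log.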
44 changes: 44 additions & 0 deletions regression-test/data/external_table_p0/tvf/test_hdfs_tvf.out
@@ -615,3 +615,47 @@ string_col text Yes false \N NONE
string_col text Yes false \N NONE
tinyint_col tinyint Yes false \N NONE

-- !create_view --
0 2 3 4 5 6.6 7.7 8.8 abc def ghiaaaaaa 2020-10-10 2020-10-10 11:12:59
1 2 3 4 5 6.6 7.7 8.8 abc def ghiaaaaaa 2020-10-10 2020-10-10 11:12:59
10 2 3 4 5 6.6 7.7 8.8 abc def ghiaaaaaa 2020-10-10 2020-10-10 11:12:59
100 2 3 4 5 6.6 7.7 8.8 abc def ghiaaaaaa 2020-10-10 2020-10-10 11:12:59
101 2 3 4 5 6.6 7.7 8.8 abc def ghiaaaaaa 2020-10-10 2020-10-10 11:12:59
102 2 3 4 5 6.6 7.7 8.8 abc def ghiaaaaaa 2020-10-10 2020-10-10 11:12:59
103 2 3 4 5 6.6 7.7 8.8 abc def ghiaaaaaa 2020-10-10 2020-10-10 11:12:59
104 2 3 4 5 6.6 7.7 8.8 abc def ghiaaaaaa 2020-10-10 2020-10-10 11:12:59
105 2 3 4 5 6.6 7.7 8.8 abc def ghiaaaaaa 2020-10-10 2020-10-10 11:12:59
106 2 3 4 5 6.6 7.7 8.8 abc def ghiaaaaaa 2020-10-10 2020-10-10 11:12:59
107 2 3 4 5 6.6 7.7 8.8 abc def ghiaaaaaa 2020-10-10 2020-10-10 11:12:59
108 2 3 4 5 6.6 7.7 8.8 abc def ghiaaaaaa 2020-10-10 2020-10-10 11:12:59
109 2 3 4 5 6.6 7.7 8.8 abc def ghiaaaaaa 2020-10-10 2020-10-10 11:12:59
11 2 3 4 5 6.6 7.7 8.8 abc def ghiaaaaaa 2020-10-10 2020-10-10 11:12:59
110 2 3 4 5 6.6 7.7 8.8 abc def ghiaaaaaa 2020-10-10 2020-10-10 11:12:59
111 2 3 4 5 6.6 7.7 8.8 abc def ghiaaaaaa 2020-10-10 2020-10-10 11:12:59
112 2 3 4 5 6.6 7.7 8.8 abc def ghiaaaaaa 2020-10-10 2020-10-10 11:12:59
113 2 3 4 5 6.6 7.7 8.8 abc def ghiaaaaaa 2020-10-10 2020-10-10 11:12:59
114 2 3 4 5 6.6 7.7 8.8 abc def ghiaaaaaa 2020-10-10 2020-10-10 11:12:59
115 2 3 4 5 6.6 7.7 8.8 abc def ghiaaaaaa 2020-10-10 2020-10-10 11:12:59

-- !alter_view --
0
1
10
100
101
102
103
104
105
106
107
108
109
11
110
111
112
113
114
115

45 changes: 45 additions & 0 deletions regression-test/data/external_table_p0/tvf/upgrade/test.out
@@ -0,0 +1,45 @@
-- This file is automatically generated. You should know what you did if you want to edit this
-- !create_view --
0
1
10
100
101
102
103
104
105
106
107
108
109
11
110
111
112
113
114
115

-- !alter_view --
0
1
10
100
101
102
103
104
105
106
107
108
109
11
110
111
112
113
114
115

26 changes: 26 additions & 0 deletions regression-test/suites/external_table_p0/tvf/test_hdfs_tvf.groovy
@@ -312,6 +312,32 @@ suite("test_hdfs_tvf","external,hive,tvf,external_docker") {
"column_separator" = ",",
"format" = "${format}"); """


// test create view from tvf and alter view from tvf
uri = "${defaultFS}" + "/user/doris/preinstalled_data/csv_format_test/all_types.csv"
format = "csv"
sql """ DROP VIEW IF EXISTS test_hdfs_tvf_create_view;"""
sql """
create view test_hdfs_tvf_create_view as
select * from HDFS(
"uri" = "${uri}",
"hadoop.username" = "${hdfsUserName}",
"column_separator" = ",",
"format" = "${format}") order by c1;
"""

order_qt_create_view """ select * from test_hdfs_tvf_create_view order by c1 limit 20; """

sql """
alter view test_hdfs_tvf_create_view as
select c1 from HDFS(
"uri" = "${uri}",
"hadoop.username" = "${hdfsUserName}",
"column_separator" = ",",
"format" = "${format}") order by c1;
"""

order_qt_alter_view """ select * from test_hdfs_tvf_create_view order by c1 limit 20; """
} finally {
}
}
55 changes: 55 additions & 0 deletions regression-test/suites/external_table_p0/tvf/upgrade/load.groovy
@@ -0,0 +1,55 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.

suite("test_tvf_upgrade_load", "p0,external,hive,external_docker,external_docker_hive,restart_fe,upgrade_case") {
String hdfs_port = context.config.otherConfigs.get("hive2HdfsPort")
String externalEnvIp = context.config.otherConfigs.get("externalEnvIp")

// It's okay to use a random `hdfsUser`, but it cannot be empty.
def hdfsUserName = "doris"
def format = "csv"
def defaultFS = "hdfs://${externalEnvIp}:${hdfs_port}"
def uri = ""

String enabled = context.config.otherConfigs.get("enableHiveTest")
if (enabled != null && enabled.equalsIgnoreCase("true")) {
// test create view from tvf and alter view from tvf
uri = "${defaultFS}" + "/user/doris/preinstalled_data/csv_format_test/all_types.csv"
format = "csv"
sql """ DROP VIEW IF EXISTS test_hdfs_tvf_create_view;"""
sql """
create view test_hdfs_tvf_create_view as
select * from HDFS(
"uri" = "${uri}",
"hadoop.username" = "${hdfsUserName}",
"column_separator" = ",",
"format" = "${format}") order by c1;
"""
logger.info("View test_hdfs_tvf_create_view created")


sql """
alter view test_hdfs_tvf_create_view as
select c1 from HDFS(
"uri" = "${uri}",
"hadoop.username" = "${hdfsUserName}",
"column_separator" = ",",
"format" = "${format}") order by c1;
"""
logger.info("View test_hdfs_tvf_create_view altered")
}
}
34 changes: 34 additions & 0 deletions regression-test/suites/external_table_p0/tvf/upgrade/test.groovy
@@ -0,0 +1,34 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.

suite("test_tvf_upgrade_test", "p0,external,hive,external_docker,external_docker_hive,restart_fe,upgrade_case") {
String hdfs_port = context.config.otherConfigs.get("hive2HdfsPort")
String externalEnvIp = context.config.otherConfigs.get("externalEnvIp")

// It's okay to use a random `hdfsUser`, but it cannot be empty.
def hdfsUserName = "doris"
def format = "csv"
def defaultFS = "hdfs://${externalEnvIp}:${hdfs_port}"
def uri = ""

String enabled = context.config.otherConfigs.get("enableHiveTest")
if (enabled != null && enabled.equalsIgnoreCase("true")) {
order_qt_create_view """ select * from test_hdfs_tvf_create_view order by c1 limit 20; """

order_qt_alter_view """ select * from test_hdfs_tvf_create_view order by c1 limit 20; """
}
}
