Skip to content

Commit

Permalink
Merge pull request apache#1261 from apache/dev-1.0.3
Browse files Browse the repository at this point in the history
Preparing Linkis release v1.0.3-rc1
  • Loading branch information
leeebai authored Dec 31, 2021
2 parents 6798c9b + 872d71e commit d5dd961
Show file tree
Hide file tree
Showing 8 changed files with 143 additions and 16 deletions.
54 changes: 54 additions & 0 deletions .github/workflows/build.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,54 @@
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements.  See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License.  You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

name: incubator-linkis ci actions

# Run CI on every push and on every pull request.
on:
  push:
  pull_request:

jobs:
  build:

    runs-on: ubuntu-latest

    strategy:
      matrix:
        node-version: [14.17.3]
        # See supported Node.js release schedule at https://nodejs.org/en/about/releases/

    steps:
      - name: Checkout
        uses: actions/checkout@v2
      - name: Set up JDK 8
        uses: actions/setup-java@v2
        with:
          distribution: 'adopt'
          java-version: 8
      - name: Use Node.js ${{ matrix.node-version }}
        uses: actions/setup-node@v2
        with:
          node-version: ${{ matrix.node-version }}
      # Backend: install the root POM first, then build all modules.
      - name: Build backend by maven
        run: |
          mvn -N install
          mvn clean package
      # Frontend: build the web UI under the `web` directory.
      - name: Build frontend by node.js
        run: |
          cd web
          npm install
          npm run build
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,7 @@ class JarUDFLoadECMHook extends ECMHook with Logging {
info("start loading UDFs")
val udfInfos = UDFClient.getUdfInfos(request.user,"udf").filter{ info => info.getUdfType == 0 && info.getExpire == false && StringUtils.isNotBlank(info.getPath) && info.getLoad == true }
udfInfos.foreach{ udfInfo =>
LaunchConstants.addPathToClassPath(pel.environment, udfInfo.getPath)
LaunchConstants.addPathToUDFPath(pel.environment, udfInfo.getPath)
}
}
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,4 +19,4 @@ wds.linkis.engineconn.debug.enable=true
#wds.linkis.keytab.enable=true
wds.linkis.engineconn.plugin.default.class=org.apache.linkis.engineplugin.hive.HiveEngineConnPlugin
wds.linkis.bdp.hive.init.sql.enable=true
wds.linkis.engine.connector.hooks=org.apache.linkis.engineconn.computation.executor.hook.ComputationEngineConnHook,org.apache.linkis.engineconn.computation.executor.hook.JarUdfEngineHook
wds.linkis.engine.connector.hooks=org.apache.linkis.engineconn.computation.executor.hook.ComputationEngineConnHook,org.apache.linkis.engineplugin.hive.hook.HiveAddJarsEngineHook,org.apache.linkis.engineconn.computation.executor.hook.JarUdfEngineHook
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,8 @@ import org.apache.linkis.engineplugin.hive.executor.HiveEngineConnExecutor
import org.apache.linkis.manager.label.entity.Label
import org.apache.linkis.manager.label.entity.engine.{CodeLanguageLabel, RunType}
import org.apache.commons.lang.StringUtils
import org.apache.linkis.common.conf.CommonVars
import org.apache.linkis.manager.engineplugin.common.launch.process.Environment

import scala.collection.JavaConversions._

Expand All @@ -43,11 +45,20 @@ class HiveAddJarsEngineHook extends EngineConnHook with Logging {
override def afterExecutionExecute(engineCreationContext: EngineCreationContext, engineConn: EngineConn): Unit = Utils.tryAndError {
val options = engineCreationContext.getOptions
var jars: String = ""
val udf_jars = CommonVars(Environment.UDF_JARS.toString, "", "UDF jar PAth").getValue
logger.info("udf jar_path:" + udf_jars)
options foreach {
case (key, value) => if (JARS.equals(key)) {
jars = value
}
}
if (StringUtils.isNotEmpty(udf_jars)) {
if (StringUtils.isNotEmpty(jars)) {
jars = jars + "," + udf_jars
} else {
jars = udf_jars
}
}
val codeLanguageLabel = new CodeLanguageLabel
codeLanguageLabel.setCodeType(RunType.HIVE.toString)
val labels = Array[Label[_]](codeLanguageLabel)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -33,16 +33,13 @@ import org.apache.linkis.manager.label.entity.Label
import org.apache.linkis.manager.label.entity.engine.UserCreatorLabel
import org.apache.linkis.protocol.UserWithCreator
import org.apache.commons.lang.StringUtils

import scala.collection.JavaConverters._
import scala.collection.mutable.ArrayBuffer
import org.apache.linkis.hadoop.common.conf.HadoopConf


/**

*
* @Date 2020/10/23
*/
class SparkSubmitProcessEngineConnLaunchBuilder private extends JavaProcessEngineConnLaunchBuilder {

private[this] val fsRoot = "hdfs://"
Expand Down Expand Up @@ -172,9 +169,11 @@ class SparkSubmitProcessEngineConnLaunchBuilder private extends JavaProcessEngin

//addOpt("--jars",Some(ENGINEMANAGER_JAR.getValue))
// info("No need to add jars for " + _jars.map(fromPath).exists(x => x.equals("hdfs:///")).toString())
_jars = _jars.filter(_.isNotBlankPath())

if(_jars.nonEmpty) {
if (_jars.isEmpty) {
_jars += AbsolutePath("")
}
_jars += AbsolutePath(variable(UDF_JARS))
if (_jars.nonEmpty) {
addList("--jars", _jars.map(fromPath))
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,13 +17,15 @@

package org.apache.linkis.manager.engineplugin.common.launch.process

import org.apache.commons.lang.StringUtils


object Environment extends Enumeration {

type Environment = Value
val USER, ECM_HOME, PWD, PATH, SHELL, JAVA_HOME, CLASSPATH,
HADOOP_HOME, HADOOP_CONF_DIR, HIVE_CONF_DIR, LOG_DIRS, TEMP_DIRS,
ECM_HOST, ECM_PORT, RANDOM_PORT, SERVICE_DISCOVERY,EUREKA_PREFER_IP,ENGINECONN_ENVKEYS = Value
ECM_HOST, ECM_PORT, RANDOM_PORT, SERVICE_DISCOVERY,EUREKA_PREFER_IP, UDF_JARS, ENGINECONN_ENVKEYS = Value

def variable(environment: Environment): String = LaunchConstants.EXPANSION_MARKER_LEFT + environment + LaunchConstants.EXPANSION_MARKER_RIGHT

Expand All @@ -46,4 +48,12 @@ object LaunchConstants {
env.put(Environment.CLASSPATH.toString, v)
}

/**
 * Appends `value` to the comma-separated UDF jar list stored in `env`
 * under the `Environment.UDF_JARS` key. Blank values are ignored; when
 * no entry exists yet, `value` becomes the initial list.
 */
def addPathToUDFPath(env: java.util.Map[String, String], value: String): Unit = {
  if (StringUtils.isNotBlank(value)) {
    val key = Environment.UDF_JARS.toString
    // Merge with any previously registered UDF jars, comma-separated.
    val merged =
      if (env.containsKey(key)) env.get(key) + "," + value
      else value
    env.put(key, merged)
  }
}

}
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@
~ See the License for the specific language governing permissions and
~ limitations under the License.
-->

<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
Expand Down Expand Up @@ -67,6 +67,45 @@
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-log4j2</artifactId>
<version>${spring.boot.version}</version>
<exclusions>
<exclusion>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-core</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-api</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-slf4j-impl</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-jul</artifactId>
</exclusion>
</exclusions>
</dependency>

<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-api</artifactId>
<version>${log4j2.version}</version>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-core</artifactId>
<version>${log4j2.version}</version>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-jul</artifactId>
<version>${log4j2.version}</version>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-slf4j-impl</artifactId>
<version>${log4j2.version}</version>
</dependency>

<dependency>
Expand Down
24 changes: 19 additions & 5 deletions scalastyle-config.xml
Original file line number Diff line number Diff line change
@@ -1,3 +1,19 @@
<!--
~ Licensed to the Apache Software Foundation (ASF) under one or more
~ contributor license agreements. See the NOTICE file distributed with
~ this work for additional information regarding copyright ownership.
~ The ASF licenses this file to You under the Apache License, Version 2.0
~ (the "License"); you may not use this file except in compliance with
~ the License. You may obtain a copy of the License at
~
~ http://www.apache.org/licenses/LICENSE-2.0
~
~ Unless required by applicable law or agreed to in writing, software
~ distributed under the License is distributed on an "AS IS" BASIS,
~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
~ See the License for the specific language governing permissions and
~ limitations under the License.
-->
<!--
If you wish to turn off checking for a section of code, you can put a comment in the source
before and after the section, with the following syntax:
Expand Down Expand Up @@ -187,7 +203,6 @@ This file is divided into 3 sections:
<!-- Custom checker -->
<!-- ================================================================================ -->

<!-- As of SPARK-7977 all printlns need to be wrapped in '// scalastyle:off/on println' -->
<check customId="println" level="error" class="org.scalastyle.file.RegexChecker" enabled="true">
<parameters>
<parameter name="regex">println</parameter>
Expand All @@ -198,7 +213,6 @@ This file is divided into 3 sections:
// scalastyle:on println]]></customMessage>
</check>

<!-- As of SPARK-9613 JavaConversions should be replaced with JavaConverters-->
<check customId="javaconversions" level="error" class="org.scalastyle.scalariform.TokenChecker" enabled="true">
<parameters>
<parameter name="regex">JavaConversions</parameter>
Expand Down Expand Up @@ -486,11 +500,11 @@ This file is divided into 3 sections:

<check level="error" class="org.scalastyle.scalariform.ImportOrderChecker" enabled="false">
<parameters>
<parameter name="groups">java,scala,3rdParty,spark</parameter>
<parameter name="groups">java,scala,3rdParty,linkis</parameter>
<parameter name="group.java">javax?\..*</parameter>
<parameter name="group.scala">scala\..*</parameter>
<parameter name="group.3rdParty">(?!org\.apache\.spark\.).*</parameter>
<parameter name="group.spark">org\.apache\.spark\..*</parameter>
<parameter name="group.3rdParty">(?!org\.apache\.linkis\.).*</parameter>
<parameter name="group.linkis">org\.apache\.linkis\..*</parameter>
</parameters>
</check>

Expand Down

0 comments on commit d5dd961

Please sign in to comment.